diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 82c9416515d24..e129cdaa12469 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -229,7 +229,7 @@ private static Page page(String operation) { case "mv_min", "mv_min_ascending" -> { var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); if (operation.endsWith("ascending")) { - builder.mvOrdering(Block.MvOrdering.ASCENDING); + builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } for (int i = 0; i < BLOCK_LENGTH; i++) { builder.beginPositionEntry(); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java index 9ef4eef2a6924..84f7cec47b737 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -107,6 +108,7 @@ private static Operator operator(String data, int topCount) { ClusterSettings.createBuiltInClusterSettings() ); return new TopNOperator( + BlockFactory.getNonBreakingInstance(), breakerService.getBreaker(CircuitBreaker.REQUEST), topCount, elementTypes, diff --git a/docs/changelog/100018.yaml b/docs/changelog/100018.yaml new file mode 100644 index 0000000000000..b39089db568c0 --- /dev/null +++ b/docs/changelog/100018.yaml @@ -0,0 +1,5 @@ +pr: 100018 +summary: Improve time-series error and documentation +area: "TSDB" +type: enhancement +issues: [] diff --git a/docs/changelog/100020.yaml b/docs/changelog/100020.yaml new file mode 100644 index 0000000000000..9f97778860eef --- /dev/null +++ b/docs/changelog/100020.yaml @@ -0,0 +1,6 @@ +pr: 100020 +summary: "[CI] `SearchResponseTests#testSerialization` failing resolved" +area: Search +type: bug +issues: + - 100005 diff --git a/docs/changelog/99584.yaml b/docs/changelog/99584.yaml new file mode 100644 index 0000000000000..229e3d8024506 --- /dev/null +++ b/docs/changelog/99584.yaml @@ -0,0 +1,5 @@ +pr: 99584 +summary: Adding an option for trained models to be platform specific +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/99832.yaml b/docs/changelog/99832.yaml new file mode 100644 index 0000000000000..9bd83591ba920 --- /dev/null +++ b/docs/changelog/99832.yaml @@ -0,0 +1,5 @@ +pr: 99832 +summary: APM Metering API +area: Infra/Core +type: enhancement +issues: [] diff --git a/docs/changelog/99947.yaml b/docs/changelog/99947.yaml new file mode 100644 index 0000000000000..61996c8fde92b --- /dev/null +++ b/docs/changelog/99947.yaml @@ -0,0 +1,5 @@ +pr: 99947 +summary: GET `_data_stream` displays both ILM and DSL information +area: Data streams +type: feature +issues: [] diff --git a/docs/changelog/99995.yaml b/docs/changelog/99995.yaml new file mode 100644 index 0000000000000..d67cbdaec1f37 --- 
/dev/null +++ b/docs/changelog/99995.yaml @@ -0,0 +1,6 @@ +pr: 99995 +summary: When a primary is inactive but this is considered expected, the same applies for the replica of this shard. +area: Health +type: enhancement +issues: + - 99951 diff --git a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc index 54638083b1053..d93df55118a8b 100644 --- a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc @@ -67,8 +67,6 @@ PUT /my-time-series-index-0/_bulk -------------------------------------------------- // NOTCONSOLE -////////////////////////// - To perform a time series aggregation, specify "time_series" as the aggregation type. When the boolean "keyed" is true, each bucket is given a unique key. @@ -85,8 +83,6 @@ GET /_search -------------------------------------------------- // NOTCONSOLE -////////////////////////// - This will return all results in the time series. However, a more typical query will use sub aggregations to reduce the data returned to something more relevant. diff --git a/docs/reference/data-streams/change-mappings-and-settings.asciidoc b/docs/reference/data-streams/change-mappings-and-settings.asciidoc index 461addf65c53c..3922ef018a713 100644 --- a/docs/reference/data-streams/change-mappings-and-settings.asciidoc +++ b/docs/reference/data-streams/change-mappings-and-settings.asciidoc @@ -573,15 +573,21 @@ stream's oldest backing index. "indices": [ { "index_name": ".ds-my-data-stream-2099.03.07-000001", <1> - "index_uuid": "Gpdiyq8sRuK9WuthvAdFbw" + "index_uuid": "Gpdiyq8sRuK9WuthvAdFbw", + "prefer_ilm": true, + "managed_by": "Unmanaged" }, { "index_name": ".ds-my-data-stream-2099.03.08-000002", - "index_uuid": "_eEfRrFHS9OyhqWntkgHAQ" + "index_uuid": "_eEfRrFHS9OyhqWntkgHAQ", + "prefer_ilm": true, + "managed_by": "Unmanaged" } ], "generation": 2, "status": "GREEN", + "next_generation_managed_by": "Unmanaged", + "prefer_ilm": true, "template": "my-data-stream-template", "hidden": false, "system": false, diff --git a/docs/reference/data-streams/downsampling-manual.asciidoc b/docs/reference/data-streams/downsampling-manual.asciidoc index 6b98816c2cf56..cc74e98b258de 100644 --- a/docs/reference/data-streams/downsampling-manual.asciidoc +++ b/docs/reference/data-streams/downsampling-manual.asciidoc @@ -358,11 +358,15 @@ This returns: "indices": [ { "index_name": ".ds-my-data-stream-2023.07.26-000001", <1> - "index_uuid": "ltOJGmqgTVm4T-Buoe7Acg" + "index_uuid": "ltOJGmqgTVm4T-Buoe7Acg", + "prefer_ilm": true, + "managed_by": "Data stream lifecycle" } ], "generation": 1, "status": "GREEN", + "next_generation_managed_by": "Data stream lifecycle", + "prefer_ilm": true, "template": "my-data-stream-template", "hidden": false, "system": false, diff --git a/docs/reference/images/index-mgmt/management_index_details.png b/docs/reference/images/index-mgmt/management_index_details.png index edf1a6517f6a2..a975b9952ca88 100644 Binary files a/docs/reference/images/index-mgmt/management_index_details.png and b/docs/reference/images/index-mgmt/management_index_details.png differ diff --git a/docs/reference/indices/get-data-stream.asciidoc b/docs/reference/indices/get-data-stream.asciidoc index ef2cf7eeee946..36998e7aa5fa3 100644 --- a/docs/reference/indices/get-data-stream.asciidoc +++ b/docs/reference/indices/get-data-stream.asciidoc @@ -225,7 +225,7 @@ cluster can not write into this data stream or change its
mappings. `lifecycle`:: (object) -Functionality in preview:[]. Contains the configuration for the data stream lifecycle management of this data stream. +Contains the configuration for the data stream lifecycle management of this data stream. + .Properties of `lifecycle` [%collapsible%open] @@ -265,11 +265,17 @@ The API returns the following response: "indices": [ { "index_name": ".ds-my-data-stream-2099.03.07-000001", - "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg" + "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management" }, { "index_name": ".ds-my-data-stream-2099.03.08-000002", - "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw" + "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management" } ], "generation": 2, @@ -277,6 +283,8 @@ The API returns the following response: "my-meta-field": "foo" }, "status": "GREEN", + "next_generation_managed_by": "Index Lifecycle Management", + "prefer_ilm": true, "template": "my-index-template", "ilm_policy": "my-lifecycle-policy", "hidden": false, @@ -292,7 +300,10 @@ The API returns the following response: "indices": [ { "index_name": ".ds-my-data-stream-two-2099.03.08-000001", - "index_uuid": "3liBu2SYS5axasRt6fUIpA" + "index_uuid": "3liBu2SYS5axasRt6fUIpA", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management" } ], "generation": 1, @@ -300,6 +311,8 @@ The API returns the following response: "my-meta-field": "foo" }, "status": "YELLOW", + "next_generation_managed_by": "Index Lifecycle Management", + "prefer_ilm": true, "template": "my-index-template", "ilm_policy": "my-lifecycle-policy", "hidden": false, diff --git a/docs/reference/indices/index-mgmt.asciidoc b/docs/reference/indices/index-mgmt.asciidoc index 571f1c813fbfa..b89d2ec957af9 100644 --- a/docs/reference/indices/index-mgmt.asciidoc +++ b/docs/reference/indices/index-mgmt.asciidoc @@ -49,11 +49,11 @@ Badges indicate if an index is a <>, a Clicking a badge narrows the list to only indices of that type. You can also filter indices using the search bar. -You can drill down into each index to investigate the index +By clicking the index name, you can open an index details page to investigate the index <>, <>, and statistics. -From this view, you can also edit the index settings. +On this page, you can also edit the index settings. -To view and explore the documents within an index, click the compass icon image:compassicon.png[width=3%] next to the index name to open {kibana-ref}/discover.html[Discover]. +To view and explore the documents within an index, click the *Discover index* button to open {kibana-ref}/discover.html[Discover]. [role="screenshot"] image::images/index-mgmt/management_index_details.png[Index Management UI] diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc new file mode 100644 index 0000000000000..874bfa64d3551 --- /dev/null +++ b/docs/reference/inference/delete-inference.asciidoc @@ -0,0 +1,57 @@ +[role="xpack"] +[[delete-inference-api]] +=== Delete {infer} API + +Deletes an {infer} model deployment. + + +[discrete] +[[delete-inference-api-request]] +==== {api-request-title} + +`DELETE /_inference/<task_type>/<model_id>` + +[discrete] +[[delete-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage` <<privileges-list-cluster,cluster privilege>>.
+ + +[discrete] +[[delete-inference-api-path-params]] +==== {api-path-parms-title} + +`<model_id>`:: +(Required, string) +The unique identifier of the {infer} model to delete. + +`<task_type>`:: +(Required, string) +The type of {infer} task that the model performs. + + +[discrete] +[[delete-inference-api-example]] +==== {api-examples-title} + +The following API call deletes the `my-elser-model` {infer} model that can +perform `sparse_embedding` tasks. + + +[source,console] +------------------------------------------------------------ +DELETE /_inference/sparse_embedding/my-elser-model +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + +[source,console-result] +------------------------------------------------------------ +{ + "acknowledged": true +} +------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc new file mode 100644 index 0000000000000..7e32bd05b5f56 --- /dev/null +++ b/docs/reference/inference/get-inference.asciidoc @@ -0,0 +1,79 @@ +[role="xpack"] +[[get-inference-api]] +=== Get {infer} API + +Retrieves {infer} model information. + +[discrete] +[[get-inference-api-request]] +==== {api-request-title} + +`GET /_inference/_all` + +`GET /_inference/<task_type>/_all` + +`GET /_inference/<task_type>/<model_id>` + +[discrete] +[[get-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage` <<privileges-list-cluster,cluster privilege>>. + +[discrete] +[[get-inference-api-desc]] +==== {api-description-title} + +You can get information in a single API request for: + +* a single {infer} model by providing the task type and the model ID, +* all of the {infer} models for a certain task type by providing the task type +and a wildcard expression, +* all of the {infer} models by using a wildcard expression. + + +[discrete] +[[get-inference-api-path-params]] +==== {api-path-parms-title} + +`<model_id>`:: +(Optional, string) +The unique identifier of the {infer} model. + + +`<task_type>`:: +(Optional, string) +The type of {infer} task that the model performs. + + +[discrete] +[[get-inference-api-example]] +==== {api-examples-title} + +The following API call retrieves information about the `my-elser-model` {infer} +model that can perform `sparse_embedding` tasks.
+ + +[source,console] +------------------------------------------------------------ +GET _inference/sparse_embedding/my-elser-model +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + +[source,console-result] +------------------------------------------------------------ +{ + "model_id": "my-elser-model", + "task_type": "sparse_embedding", + "service": "elser_mlnode", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "task_settings": {} +} +------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc new file mode 100644 index 0000000000000..ec1f01bc4d093 --- /dev/null +++ b/docs/reference/inference/inference-apis.asciidoc @@ -0,0 +1,16 @@ +[role="xpack"] +[[inference-apis]] +== {infer-cap} APIs + +You can use the following APIs to manage {infer} models and perform {infer}: + +* <<delete-inference-api>> +* <<get-inference-api>> +* <<post-inference-api>> +* <<put-inference-api>> + + +include::delete-inference.asciidoc[] +include::get-inference.asciidoc[] +include::post-inference.asciidoc[] +include::put-inference.asciidoc[] \ No newline at end of file diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc new file mode 100644 index 0000000000000..99dd4a059519f --- /dev/null +++ b/docs/reference/inference/post-inference.asciidoc @@ -0,0 +1,97 @@ +[role="xpack"] +[[post-inference-api]] +=== Perform inference API + +Performs an inference task on an input text by using an {infer} model. + + +[discrete] +[[post-inference-api-request]] +==== {api-request-title} + +`POST /_inference/<task_type>/<model_id>` + + +[discrete] +[[post-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage` <<privileges-list-cluster,cluster privilege>>. + + +[discrete] +[[post-inference-api-desc]] +==== {api-description-title} + +The perform {infer} API enables you to use {infer} models to perform specific +tasks on data that you provide as an input. The API returns a response with the +results of the tasks. The {infer} model you use can perform one specific task +that has been defined when the model was created with the <<put-inference-api>>. + + +[discrete] +[[post-inference-api-path-params]] +==== {api-path-parms-title} + +`<model_id>`:: +(Required, string) +The unique identifier of the {infer} model. + + +`<task_type>`:: +(Required, string) +The type of {infer} task that the model performs. + + +[discrete] +[[post-inference-api-request-body]] +==== {api-request-body-title} + +`input`:: +(Required, string) +The text on which you want to perform the {infer} task. + + +[discrete] +[[post-inference-api-example]] +==== {api-examples-title} + +The following example performs sparse embedding on the example sentence. + + +[source,console] +------------------------------------------------------------ +POST _inference/sparse_embedding/my-elser-model +{ + "input": "The sky above the port was the color of television tuned to a dead channel."
+} +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + + +[source,console-result] +------------------------------------------------------------ +{ + "sparse_embedding": { + "port": 2.1259406, + "sky": 1.7073475, + "color": 1.6922266, + "dead": 1.6247464, + "television": 1.3525393, + "above": 1.2425821, + "tuned": 1.1440028, + "colors": 1.1218185, + "tv": 1.0111054, + "ports": 1.0067928, + "poem": 1.0042328, + "channel": 0.99471164, + "tune": 0.96235967, + "scene": 0.9020516, + (...) + } +} +------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc new file mode 100644 index 0000000000000..c5ccd6a57a8dd --- /dev/null +++ b/docs/reference/inference/put-inference.asciidoc @@ -0,0 +1,104 @@ +[role="xpack"] +[[put-inference-api]] +=== Create {infer} API + +Creates a model to perform an {infer} task. + + +[discrete] +[[put-inference-api-request]] +==== {api-request-title} + +`PUT /_inference/<task_type>/<model_id>` + + +[discrete] +[[put-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage` <<privileges-list-cluster,cluster privilege>>. + +[discrete] +[[put-inference-api-desc]] +==== {api-description-title} + +The create {infer} API enables you to create and configure an {infer} model to +perform a specific {infer} task. + + +[discrete] +[[put-inference-api-path-params]] +==== {api-path-parms-title} + + +`<model_id>`:: +(Required, string) +The unique identifier of the model. + +`<task_type>`:: +(Required, string) +The type of the {infer} task that the model will perform. Available task types: +* `sparse_embedding`, +* `text_embedding`. + + +[discrete] +[[put-inference-api-request-body]] +==== {api-request-body-title} + +`service`:: +(Required, string) +The type of service supported for the specified task type. +Available services: +* `elser`, +* `elser_mlnode`. + +`service_settings`:: +(Required, object) +Settings used to install the {infer} model. These settings are specific to the +`service` you specified. + +`task_settings`:: +(Optional, object) +Settings to configure the {infer} task. These settings are specific to the +`<task_type>` you specified. + + +[discrete] +[[put-inference-api-example]] +==== {api-examples-title} + +The following example shows how to create an {infer} model called +`my-elser-model` to perform a `sparse_embedding` task type.
+ +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/my-elser-model +{ + "service": "elser_mlnode", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "task_settings": {} +} +------------------------------------------------------------ +// TEST[skip:TBD] + + +Example response: + +[source,console-result] +------------------------------------------------------------ +{ + "model_id": "my-elser-model", + "task_type": "sparse_embedding", + "service": "elser_mlnode", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "task_settings": {} +} +------------------------------------------------------------ +// NOTCONSOLE diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index 82fd1872e6a76..7da46e13a8ce4 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -3,7 +3,9 @@ = Create trained models API [subs="attributes"] ++++ + Create trained models + ++++ Creates a trained model. @@ -1645,6 +1647,16 @@ Appropriate types are: * `pytorch`: The stored definition is a PyTorch (specifically a TorchScript) model. Currently only NLP models are supported. For more information, refer to {ml-docs}/ml-nlp.html[{nlp-cap}]. -- +`platform_architecture`:: +(Optional, string) +If the model only works on one platform, because it is heavily +optimized for a particular processor architecture and OS combination, +then this field specifies which one. The format of the string must match +the platform identifiers used by Elasticsearch, so it must be one of `linux-x86_64`, +`linux-aarch64`, `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. +For portable models (those that work independently of processor architecture or +OS features), leave this field unset. + `tags`:: (Optional, string) diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 8e1023c47b929..4ec8c203bbef9 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -1932,3 +1932,73 @@ Refer to <>. === Configure roles and users for remote clusters Refer to <>.
+ +[role="exclude",id="ingest-pipeline-search"] +=== Ingest pipelines for Search indices + +coming::[8.11.0] + +[role="exclude",id="ingest-pipeline-search-inference"] +=== Inference processing for Search indices + +coming::[8.11.0] + +[id="ingest-pipeline-search-inference-update-mapping"] +==== Update mapping + +coming::[8.11.0] + +[role="exclude",id="nlp-example"] +=== Tutorial: Natural language processing (NLP) + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-overview"] +=== Elastic Behavioral Analytics + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-start"] +=== Get started with Behavioral Analytics + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-api"] +=== Behavioral Analytics APIs + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-event"] +=== View Behavioral Analytics Events + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-event-reference"] +=== Behavioral Analytics events reference + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-cors"] +=== Set up CORS for Behavioral Analytics + +coming::[8.11.0] + +[role="exclude",id="search-application-overview"] +=== Elastic Search Applications + +coming::[8.11.0] + +[role="exclude",id="search-application-api"] +=== Search Applications search API and templates + +coming::[8.11.0] + +[role="exclude",id="search-application-client"] +=== Search Applications client + +coming::[8.11.0] + +[role="exclude",id="search-application-security"] +=== Search Applications security + +coming::[8.11.0] diff --git a/docs/reference/release-notes/8.9.2.asciidoc b/docs/reference/release-notes/8.9.2.asciidoc index d4244eab27645..6b00405261daf 100644 --- a/docs/reference/release-notes/8.9.2.asciidoc +++ b/docs/reference/release-notes/8.9.2.asciidoc @@ -3,6 +3,25 @@ Also see <>. +[float] +[[security-updates-8.9.2]] +=== Security updates + +* {es} generally filters out sensitive information and credentials before +logging to the audit log. It was found that this filtering was not applied when +requests to {es} use certain deprecated `_xpack/security` URIs for APIs. The +impact of this flaw is that sensitive information, such as passwords and tokens, +might be printed in cleartext in {es} audit logs. Note that audit logging is +disabled by default and needs to be explicitly enabled. Even when audit logging +is enabled, request bodies that could contain sensitive information are not +printed to the audit log unless explicitly configured. ++ +The issue is resolved in {es} 8.9.2. ++ +For more information, see our related +https://discuss.elastic.co/t/elasticsearch-8-9-2-and-7-17-13-security-update/342479[security +announcement]. + [[bug-8.9.2]] [float] === Bug fixes diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 1da39333db43e..b8ad9d9a0736e 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -28,8 +28,9 @@ not be included yet. 
* <> * <> * <> -* <> +* <> * <> +* <> * <> * <> * <> @@ -74,8 +75,9 @@ include::{es-repo-dir}/text-structure/apis/find-structure.asciidoc[leveloffset=+ include::{es-repo-dir}/graph/explore.asciidoc[] include::{es-repo-dir}/indices.asciidoc[] include::{es-repo-dir}/ilm/apis/ilm-api.asciidoc[] -include::{es-repo-dir}/ingest/apis/index.asciidoc[] +include::{es-repo-dir}/inference/inference-apis.asciidoc[] include::info.asciidoc[] +include::{es-repo-dir}/ingest/apis/index.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{es-repo-dir}/rest-api/logstash/index.asciidoc[] include::{es-repo-dir}/ml/common/apis/index.asciidoc[] diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index 0f07f1f4128fe..082bb2ae2e020 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ -14,7 +14,7 @@ The instructions in this tutorial show you how to use ELSER to perform semantic search on your data. NOTE: Only the first 512 extracted tokens per field are considered during -semantic search with ELSER v1. Refer to +semantic search with ELSER. Refer to {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[this page] for more information. @@ -44,15 +44,16 @@ you must provide suitably sized nodes yourself. First, the mapping of the destination index - the index that contains the tokens that the model created based on your text - must be created. The destination -index must have a field with the <<rank-features,`rank_features`>> field type -to index the ELSER output. +index must have a field with the +<<sparse-vector,`sparse_vector`>> field type to index the +ELSER output. -NOTE: ELSER output must be ingested into a field with the `rank_features` field -type. Otherwise, {es} interprets the token-weight pairs as a massive amount of -fields in a document. If you get an error similar to this +NOTE: ELSER output must be ingested into a field with the `sparse_vector` or +`rank_features` field type. Otherwise, {es} interprets the token-weight pairs as +a massive amount of fields in a document. If you get an error similar to this `"Limit of total fields [1000] has been exceeded while adding new fields"` then the ELSER output field is not mapped properly and it has a field type different -than `rank_features`. +than `sparse_vector` or `rank_features`. [source,console] ---- @@ -61,7 +62,7 @@ PUT my-index "mappings": { "properties": { "ml.tokens": { <1> - "type": "rank_features" <2> + "type": "sparse_vector" <2> }, "text": { <3> "type": "text" <4> @@ -72,7 +73,7 @@ PUT my-index ---- // TEST[skip:TBD] <1> The name of the field to contain the generated tokens. -<2> The field to contain the tokens is a `rank_features` field. +<2> The field to contain the tokens is a `sparse_vector` field. <3> The name of the field from which to create the sparse vector representation. In this example, the name of the field is `text`. <4> The field type which is text in this example. @@ -90,12 +91,12 @@ that is being ingested in the pipeline.
[source,console] ---- -PUT _ingest/pipeline/elser-v1-test +PUT _ingest/pipeline/elser-v2-test { "processors": [ { "inference": { - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "target_field": "ml", "field_map": { <1> "text": "text_field" } @@ -155,7 +156,7 @@ POST _reindex?wait_for_completion=false }, "dest": { "index": "my-index", - "pipeline": "elser-v1-test" + "pipeline": "elser-v2-test" } } ---- @@ -192,7 +193,7 @@ GET my-index/_search "query":{ "text_expansion":{ "ml.tokens":{ - "model_id":".elser_model_1", + "model_id":".elser_model_2", "model_text":"How to avoid muscle soreness after running?" } } @@ -236,7 +237,7 @@ weights. "exercises":0.36694175, (...) }, - "model_id":".elser_model_1" + "model_id":".elser_model_2" } } }, @@ -276,7 +277,7 @@ GET my-index/_search "text_expansion": { "ml.tokens": { "model_text": "How to avoid muscle soreness after running?", - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "boost": 1 <2> } } @@ -342,7 +343,7 @@ PUT my-index }, "properties": { "ml.tokens": { - "type": "rank_features" + "type": "sparse_vector" }, "text": { "type": "text" @@ -359,7 +360,7 @@ PUT my-index ==== Further reading * {ml-docs}/ml-nlp-elser.html[How to download and deploy ELSER] -* {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[ELSER v1 limitation] +* {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[ELSER limitation] * https://www.elastic.co/blog/may-2023-launch-information-retrieval-elasticsearch-ai-model[Improving information retrieval in the Elastic Stack: Introducing Elastic Learned Sparse Encoder, our new retrieval model] [discrete] diff --git a/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc b/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc index 228b7a9202341..0228078e8ce39 100644 --- a/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc @@ -1,15 +1,15 @@ // tag::elser[] ELSER produces token-weight pairs as output from the input text and the query. -The {es} <<rank-features,`rank_features`>> field type can store these +The {es} <<sparse-vector,`sparse_vector`>> field type can store these token-weight pairs as numeric feature vectors. The index must have a field with -the `rank_features` field type to index the tokens that ELSER generates. +the `sparse_vector` field type to index the tokens that ELSER generates. To create a mapping for your ELSER index, refer to the <> of the tutorial. The example shows how to create an index mapping for `my-index` that defines the `my_embeddings.tokens` field - which will contain the ELSER output - as a -`rank_features` field. +`sparse_vector` field. [source,console] ---- PUT my-index { "mappings": { "properties": { "my_embeddings.tokens": { <1> - "type": "rank_features" <2> + "type": "sparse_vector" <2> }, "my_text_field": { <3> "type": "text" <4> } } } } ---- <1> The name of the field that will contain the tokens generated by ELSER. -<2> The field that contains the tokens must be a `rank_features` field. +<2> The field that contains the tokens must be a `sparse_vector` field. <3> The name of the field from which to create the sparse vector representation. In this example, the name of the field is `my_text_field`. <4> The field type is `text` in this example.
diff --git a/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc b/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc index 0adfda5c2bff9..786f40fe141bd 100644 --- a/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc @@ -21,7 +21,7 @@ PUT _ingest/pipeline/my-text-embeddings-pipeline "processors": [ { "inference": { - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "target_field": "my_embeddings", "field_map": { <1> "my_text_field": "text_field" diff --git a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc index 26fc25c2385c8..a99bdf3c8722b 100644 --- a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc @@ -22,7 +22,7 @@ GET my-index/_search "query": { "text_expansion": { "my_embeddings.tokens": { - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "model_text": "the query string" } } diff --git a/docs/reference/tab-widgets/semantic-search/search.asciidoc b/docs/reference/tab-widgets/semantic-search/search.asciidoc index 425b797789270..d1cd31fbe4309 100644 --- a/docs/reference/tab-widgets/semantic-search/search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/search.asciidoc @@ -12,7 +12,7 @@ GET my-index/_search "query":{ "text_expansion":{ "my_embeddings.tokens":{ <1> - "model_id":".elser_model_1", + "model_id":".elser_model_2", "model_text":"the query string" } } @@ -20,7 +20,7 @@ GET my-index/_search } ---- // TEST[skip:TBD] -<1> The field of type `rank_features`. +<1> The field of type `sparse_vector`. // end::elser[] diff --git a/libs/core/src/main/java/org/elasticsearch/core/Releasables.java b/libs/core/src/main/java/org/elasticsearch/core/Releasables.java index b8d1a9a542779..c2b48c4706573 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/Releasables.java +++ b/libs/core/src/main/java/org/elasticsearch/core/Releasables.java @@ -89,7 +89,7 @@ private static void close(boolean success, Releasable... 
releasables) { * // the resources will be released when reaching here * */ - public static Releasable wrap(final Iterable<Releasable> releasables) { + public static Releasable wrap(final Iterable<? extends Releasable> releasables) { return new Releasable() { @Override public void close() { diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index be59eda4a63c2..935c4958ba3d7 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -27,6 +27,7 @@ import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; import org.elasticsearch.telemetry.apm.internal.APMTelemetryProvider; +import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; @@ -97,13 +98,16 @@ public Collection<Object> createComponents( apmAgentSettings.syncAgentSystemProperties(settings); apmAgentSettings.addClusterSettingsListeners(clusterService, telemetryProvider.get()); - return List.of(apmTracer); + final APMMeter apmMeter = telemetryProvider.get().getMeter(); + + return List.of(apmTracer, apmMeter); } @Override public List<Setting<?>> getSettings() { return List.of( APMAgentSettings.APM_ENABLED_SETTING, + APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING, APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING, APMAgentSettings.APM_TRACING_SANITIZE_FIELD_NAMES, diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 75ca94bb13ad6..e4a194ebe0172 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; import java.security.AccessController; @@ -40,14 +41,24 @@ public class APMAgentSettings { /** * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent * config file, as then their values could not be overridden dynamically via system properties.
*/ - static Map<String, String> APM_AGENT_DEFAULT_SETTINGS = Map.of("transaction_sample_rate", "0.2"); + static Map<String, String> APM_AGENT_DEFAULT_SETTINGS = Map.of( + "transaction_sample_rate", + "0.2", + "enable_experimental_instrumentations", + "true" + ); public void addClusterSettingsListeners(ClusterService clusterService, APMTelemetryProvider apmTelemetryProvider) { final ClusterSettings clusterSettings = clusterService.getClusterSettings(); final APMTracer apmTracer = apmTelemetryProvider.getTracer(); + final APMMeter apmMeter = apmTelemetryProvider.getMeter(); clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, enabled -> { apmTracer.setEnabled(enabled); + this.setAgentSetting("instrument", Boolean.toString(enabled)); + }); + clusterSettings.addSettingsUpdateConsumer(TELEMETRY_METRICS_ENABLED_SETTING, enabled -> { + apmMeter.setEnabled(enabled); // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to // minimise its impact to a running Elasticsearch. this.setAgentSetting("recording", Boolean.toString(enabled)); @@ -106,8 +117,10 @@ public void setAgentSetting(String key, String value) { private static final List<String> PROHIBITED_AGENT_KEYS = List.of( // ES generates a config file and sets this value "config_file", - // ES controls this via `tracing.apm.enabled` - "recording" + // ES controls this via `telemetry.metrics.enabled` + "recording", + // ES controls this via `apm.enabled` + "instrument" ); public static final Setting.AffixSetting<String> APM_AGENT_SETTINGS = Setting.prefixKeySetting( @@ -164,6 +177,13 @@ public void setAgentSetting(String key, String value) { NodeScope ); + public static final Setting<Boolean> TELEMETRY_METRICS_ENABLED_SETTING = Setting.boolSetting( + "telemetry.metrics.enabled", + false, + OperatorDynamic, + NodeScope + ); + public static final Setting<SecureString> APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( APM_SETTING_PREFIX + "secret_token", null diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java index 495afd43bf176..ae9d91cc6ec51 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java @@ -10,19 +10,27 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; public class APMTelemetryProvider implements TelemetryProvider { private final Settings settings; private final APMTracer apmTracer; + private final APMMeter apmMeter; public APMTelemetryProvider(Settings settings) { this.settings = settings; apmTracer = new APMTracer(settings); + apmMeter = new APMMeter(settings); } @Override public APMTracer getTracer() { return apmTracer; } + + @Override + public APMMeter getMeter() { + return apmMeter; + } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java new file mode 100644 index 0000000000000..0a8d425579ca2 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.telemetry.apm.internal.APMTelemetryProvider; +import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.telemetry.metric.DoubleGauge; +import org.elasticsearch.telemetry.metric.DoubleHistogram; +import org.elasticsearch.telemetry.metric.DoubleUpDownCounter; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongGauge; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.LongUpDownCounter; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.function.Supplier; + +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING; + +public class APMMeter extends AbstractLifecycleComponent implements org.elasticsearch.telemetry.metric.Meter { + private final Instruments instruments; + + private final Supplier<Meter> otelMeterSupplier; + private final Supplier<Meter> noopMeterSupplier; + + private volatile boolean enabled; + + public APMMeter(Settings settings) { + this(settings, APMMeter.otelMeter(), APMMeter.noopMeter()); + } + + public APMMeter(Settings settings, Supplier<Meter> otelMeterSupplier, Supplier<Meter> noopMeterSupplier) { + this.enabled = TELEMETRY_METRICS_ENABLED_SETTING.get(settings); + this.otelMeterSupplier = otelMeterSupplier; + this.noopMeterSupplier = noopMeterSupplier; + this.instruments = new Instruments(enabled ?
createOtelMeter() : createNoopMeter()); + } + + /** + * @see org.elasticsearch.telemetry.apm.internal.APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider) + */ + public void setEnabled(boolean enabled) { + this.enabled = enabled; + if (enabled) { + instruments.setProvider(createOtelMeter()); + } else { + instruments.setProvider(createNoopMeter()); + } + } + + @Override + protected void doStart() {} + + @Override + protected void doStop() { + instruments.setProvider(createNoopMeter()); + } + + @Override + protected void doClose() {} + + @Override + public DoubleCounter registerDoubleCounter(String name, String description, String unit) { + return instruments.registerDoubleCounter(name, description, unit); + } + + @Override + public DoubleCounter getDoubleCounter(String name) { + return instruments.getDoubleCounter(name); + } + + @Override + public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { + return instruments.registerDoubleUpDownCounter(name, description, unit); + } + + @Override + public DoubleUpDownCounter getDoubleUpDownCounter(String name) { + return instruments.getDoubleUpDownCounter(name); + } + + @Override + public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + return instruments.registerDoubleGauge(name, description, unit); + } + + @Override + public DoubleGauge getDoubleGauge(String name) { + return instruments.getDoubleGauge(name); + } + + @Override + public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { + return instruments.registerDoubleHistogram(name, description, unit); + } + + @Override + public DoubleHistogram getDoubleHistogram(String name) { + return instruments.getDoubleHistogram(name); + } + + @Override + public LongCounter registerLongCounter(String name, String description, String unit) { + return instruments.registerLongCounter(name, description, unit); + } + + @Override + public LongCounter getLongCounter(String name) { + return instruments.getLongCounter(name); + } + + @Override + public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { + return instruments.registerLongUpDownCounter(name, description, unit); + } + + @Override + public LongUpDownCounter getLongUpDownCounter(String name) { + return instruments.getLongUpDownCounter(name); + } + + @Override + public LongGauge registerLongGauge(String name, String description, String unit) { + return instruments.registerLongGauge(name, description, unit); + } + + @Override + public LongGauge getLongGauge(String name) { + return instruments.getLongGauge(name); + } + + @Override + public LongHistogram registerLongHistogram(String name, String description, String unit) { + return instruments.registerLongHistogram(name, description, unit); + } + + @Override + public LongHistogram getLongHistogram(String name) { + return instruments.getLongHistogram(name); + } + + Meter createOtelMeter() { + assert this.enabled; + return AccessController.doPrivileged((PrivilegedAction<Meter>) otelMeterSupplier::get); + } + + private Meter createNoopMeter() { + return noopMeterSupplier.get(); + } + + private static Supplier<Meter> noopMeter() { + return () -> OpenTelemetry.noop().getMeter("noop"); + } + + // to be used within doPrivileged block + private static Supplier<Meter> otelMeter() { + var openTelemetry = GlobalOpenTelemetry.get(); + var meter = openTelemetry.getMeter("elasticsearch"); + return () -> meter; + } + + // scope for testing + Instruments
getInstruments() { + return instruments; + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java new file mode 100644 index 0000000000000..d3d485f52bc49 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.telemetry.metric.Instrument; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; + +/** + * An instrument that contains the name, description and unit. The delegate may be replaced when + * the provider is updated. + * Subclasses should implement the builder, which is used on initialization and provider updates. + * @param <T> delegated instrument + */ +public abstract class AbstractInstrument<T> implements Instrument { + private final AtomicReference<T> delegate; + private final String name; + private final String description; + private final String unit; + + public AbstractInstrument(Meter meter, String name, String description, String unit) { + this.name = Objects.requireNonNull(name); + this.description = Objects.requireNonNull(description); + this.unit = Objects.requireNonNull(unit); + this.delegate = new AtomicReference<>(doBuildInstrument(meter)); + } + + private T doBuildInstrument(Meter meter) { + return AccessController.doPrivileged((PrivilegedAction<T>) () -> buildInstrument(meter)); + } + + @Override + public String getName() { + return name; + } + + public String getUnit() { + return unit; + } + + T getInstrument() { + return delegate.get(); + } + + String getDescription() { + return description; + } + + void setProvider(@Nullable Meter meter) { + delegate.set(doBuildInstrument(Objects.requireNonNull(meter))); + } + + abstract T buildInstrument(Meter meter); +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java new file mode 100644 index 0000000000000..b25ffdff5481b --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleCounterAdapter wraps an otel DoubleCounter + */ +public class DoubleCounterAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.DoubleCounter> + implements + org.elasticsearch.telemetry.metric.DoubleCounter { + + public DoubleCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.DoubleCounter buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .counterBuilder(getName()) + .ofDoubles() + .setDescription(getDescription()) + .setUnit(getUnit()) + .build(); + } + + @Override + public void increment() { + getInstrument().add(1d); + } + + @Override + public void incrementBy(double inc) { + assert inc >= 0; + getInstrument().add(inc); + } + + @Override + public void incrementBy(double inc, Map<String, Object> attributes) { + assert inc >= 0; + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java new file mode 100644 index 0000000000000..9d55d475d4a93 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleGaugeAdapter wraps an otel ObservableDoubleMeasurement + */ +public class DoubleGaugeAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.ObservableDoubleMeasurement> + implements + org.elasticsearch.telemetry.metric.DoubleGauge { + + public DoubleGaugeAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.ObservableDoubleMeasurement buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).gaugeBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).buildObserver(); + } + + @Override + public void record(double value) { + getInstrument().record(value); + } + + @Override + public void record(double value, Map<String, Object> attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java new file mode 100644 index 0000000000000..5fd1a8a189b0f --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleHistogramAdapter wraps an otel DoubleHistogram + */ +public class DoubleHistogramAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.DoubleHistogram> + implements + org.elasticsearch.telemetry.metric.DoubleHistogram { + + public DoubleHistogramAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.DoubleHistogram buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).histogramBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).build(); + } + + @Override + public void record(double value) { + getInstrument().record(value); + } + + @Override + public void record(double value, Map<String, Object> attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java new file mode 100644 index 0000000000000..9a2fc1b564766 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleUpDownCounterAdapter wraps an otel DoubleUpDownCounter + */ +public class DoubleUpDownCounterAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.DoubleUpDownCounter> + implements + org.elasticsearch.telemetry.metric.DoubleUpDownCounter { + + public DoubleUpDownCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.DoubleUpDownCounter buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .upDownCounterBuilder(getName()) + .ofDoubles() + .setDescription(getDescription()) + .setUnit(getUnit()) + .build(); + } + + @Override + public void add(double inc) { + getInstrument().add(inc); + } + + @Override + public void add(double inc, Map<String, Object> attributes) { + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java new file mode 100644 index 0000000000000..92d7d692f0ea5 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ReleasableLock; +import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.telemetry.metric.DoubleGauge; +import org.elasticsearch.telemetry.metric.DoubleHistogram; +import org.elasticsearch.telemetry.metric.DoubleUpDownCounter; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongGauge; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.LongUpDownCounter; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.locks.ReentrantLock; + +/** + * Container for registering and fetching instruments by type and name. + * Instrument names must be unique for a given type on registration. + * {@link #setProvider(Meter)} is used to change the provider for all existing instruments. + */ +public class Instruments { + private final Registrar<DoubleCounterAdapter> doubleCounters = new Registrar<>(); + private final Registrar<DoubleUpDownCounterAdapter> doubleUpDownCounters = new Registrar<>(); + private final Registrar<DoubleGaugeAdapter> doubleGauges = new Registrar<>(); + private final Registrar<DoubleHistogramAdapter> doubleHistograms = new Registrar<>(); + private final Registrar<LongCounterAdapter> longCounters = new Registrar<>(); + private final Registrar<LongUpDownCounterAdapter> longUpDownCounters = new Registrar<>(); + private final Registrar<LongGaugeAdapter> longGauges = new Registrar<>(); + private final Registrar<LongHistogramAdapter> longHistograms = new Registrar<>(); + + private final Meter meter; + + public Instruments(Meter meter) { + this.meter = meter; + } + + private final List<Registrar<?>> registrars = List.of( + doubleCounters, + doubleUpDownCounters, + doubleGauges, + doubleHistograms, + longCounters, + longUpDownCounters, + longGauges, + longHistograms + ); + + // Access to registration has to be restricted when the provider is updated in ::setProvider + protected final ReleasableLock registerLock = new ReleasableLock(new ReentrantLock()); + + public DoubleCounter registerDoubleCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return doubleCounters.register(new DoubleCounterAdapter(meter, name, description, unit)); + } + } + + public DoubleCounter getDoubleCounter(String name) { + return doubleCounters.get(name); + } + + public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return doubleUpDownCounters.register(new DoubleUpDownCounterAdapter(meter, name, description, unit)); + } + } + + public DoubleUpDownCounter getDoubleUpDownCounter(String name) { + return doubleUpDownCounters.get(name); + } + + public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return doubleGauges.register(new DoubleGaugeAdapter(meter, name, description, unit)); + } + } + + public DoubleGauge getDoubleGauge(String name) { + return doubleGauges.get(name); + } + + public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return
doubleHistograms.register(new DoubleHistogramAdapter(meter, name, description, unit)); + } + } + + public DoubleHistogram getDoubleHistogram(String name) { + return doubleHistograms.get(name); + } + + public LongCounter registerLongCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longCounters.register(new LongCounterAdapter(meter, name, description, unit)); + } + } + + public LongCounter getLongCounter(String name) { + return longCounters.get(name); + } + + public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longUpDownCounters.register(new LongUpDownCounterAdapter(meter, name, description, unit)); + } + } + + public LongUpDownCounter getLongUpDownCounter(String name) { + return longUpDownCounters.get(name); + } + + public LongGauge registerLongGauge(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longGauges.register(new LongGaugeAdapter(meter, name, description, unit)); + } + } + + public LongGauge getLongGauge(String name) { + return longGauges.get(name); + } + + public LongHistogram registerLongHistogram(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longHistograms.register(new LongHistogramAdapter(meter, name, description, unit)); + } + } + + public LongHistogram getLongHistogram(String name) { + return longHistograms.get(name); + } + + public void setProvider(Meter meter) { + try (ReleasableLock lock = registerLock.acquire()) { + for (Registrar registrar : registrars) { + registrar.setProvider(meter); + } + } + } + + /** + * A typed wrapper for a instrument that + * @param + */ + private static class Registrar> { + private final Map registered = ConcurrentCollections.newConcurrentMap(); + + T register(T instrument) { + registered.compute(instrument.getName(), (k, v) -> { + if (v != null) { + throw new IllegalStateException( + instrument.getClass().getSimpleName() + "[" + instrument.getName() + "] already registered" + ); + } + + return instrument; + }); + return instrument; + } + + T get(String name) { + return registered.get(name); + } + + void setProvider(Meter meter) { + registered.forEach((k, v) -> v.setProvider(meter)); + } + } + + // scope for testing + Meter getMeter() { + return meter; + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java new file mode 100644 index 0000000000000..122d16d9e1aa4 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
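Editor's note: since Instruments is the registration surface for all eight instrument types, a short usage sketch may help. It is hypothetical, again against the no-op meter, and shows the two properties the class enforces: a name may be registered once per instrument type, and setProvider swaps the backing Meter for every registered instrument under the same lock that guards registration.

```java
import io.opentelemetry.api.OpenTelemetry;

public class InstrumentsUsageSketch {
    public static void main(String[] args) {
        var instruments = new Instruments(OpenTelemetry.noop().getMeter("noop"));
        instruments.registerLongCounter("es.requests", "handled requests", "count");
        try {
            instruments.registerLongCounter("es.requests", "duplicate", "count"); // same type + same name
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage()); // LongCounterAdapter[es.requests] already registered
        }
        // The same name under a different instrument type is a separate registration.
        instruments.registerDoubleCounter("es.requests", "handled requests", "count");

        // Swap the backing meter; every registered adapter is re-pointed at it.
        instruments.setProvider(OpenTelemetry.noop().getMeter("replacement"));
        instruments.getLongCounter("es.requests").increment();
    }
}
```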
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java new file mode 100644 index 0000000000000..122d16d9e1aa4 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongCounterAdapter wraps an otel LongCounter + */ +public class LongCounterAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.LongCounter> + implements + org.elasticsearch.telemetry.metric.LongCounter { + + public LongCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.LongCounter buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).counterBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).build(); + } + + @Override + public void increment() { + getInstrument().add(1L); + } + + @Override + public void incrementBy(long inc) { + assert inc >= 0; + getInstrument().add(inc); + } + + @Override + public void incrementBy(long inc, Map<String, Object> attributes) { + assert inc >= 0; + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java new file mode 100644 index 0000000000000..48430285a5173 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongGaugeAdapter wraps an otel ObservableLongMeasurement + */ +public class LongGaugeAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.ObservableLongMeasurement> + implements + org.elasticsearch.telemetry.metric.LongGauge { + + public LongGaugeAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.ObservableLongMeasurement buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .gaugeBuilder(getName()) + .ofLongs() + .setDescription(getDescription()) + .setUnit(getUnit()) + .buildObserver(); + } + + @Override + public void record(long value) { + getInstrument().record(value); + } + + @Override + public void record(long value, Map<String, Object> attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +}
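Editor's note, one caveat worth flagging on LongGaugeAdapter: in the OpenTelemetry API gauges are asynchronous instruments, and an ObservableLongMeasurement obtained from buildObserver() is normally sampled inside a callback registered via Meter.batchCallback; a value recorded outside such a callback may never be collected by an SDK. A sketch of the callback-based wiring, with an invented metric name:

```java
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.api.metrics.ObservableLongMeasurement;

public class GaugeCallbackSketch {
    public static void main(String[] args) {
        Meter meter = OpenTelemetry.noop().getMeter("sketch");
        ObservableLongMeasurement queueSize = meter.gaugeBuilder("es.queue.size").ofLongs().buildObserver();
        // The runnable is invoked by the SDK at collection time; recording there
        // publishes the current value of the registered measurement.
        meter.batchCallback(() -> queueSize.record(42L), queueSize);
    }
}
```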
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java new file mode 100644 index 0000000000000..bb5be4866e7b7 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongHistogramAdapter wraps an otel LongHistogram + */ +public class LongHistogramAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.LongHistogram> + implements + org.elasticsearch.telemetry.metric.LongHistogram { + + public LongHistogramAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.LongHistogram buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .histogramBuilder(getName()) + .ofLongs() + .setDescription(getDescription()) + .setUnit(getUnit()) + .build(); + } + + @Override + public void record(long value) { + getInstrument().record(value); + } + + @Override + public void record(long value, Map<String, Object> attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java new file mode 100644 index 0000000000000..e5af85e4ed192 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongUpDownCounterAdapter wraps an otel LongUpDownCounter + */ +public class LongUpDownCounterAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.LongUpDownCounter> + implements + org.elasticsearch.telemetry.metric.LongUpDownCounter { + + public LongUpDownCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.LongUpDownCounter buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).upDownCounterBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).build(); + } + + @Override + public void add(long inc) { + getInstrument().add(inc); + } + + @Override + public void add(long inc, Map<String, Object> attributes) { + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +}
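Editor's note: the difference between the counter flavours is behavioural rather than structural. LongCounterAdapter asserts a non-negative increment because counters are monotonic, while an up-down counter tracks a quantity that can rise and fall. A hypothetical example, assuming the same package and an invented metric name:

```java
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.metrics.Meter;

public class UpDownCounterSketch {
    public static void main(String[] args) {
        Meter meter = OpenTelemetry.noop().getMeter("sketch");
        var inFlight = new LongUpDownCounterAdapter(meter, "es.http.in_flight", "in-flight requests", "count");
        inFlight.add(1);  // request started
        inFlight.add(-1); // request finished; LongCounterAdapter would assert on a negative delta
    }
}
```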
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java new file mode 100644 index 0000000000000..673025a1a41f4 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.common.Attributes; + +import java.util.Map; + +class OtelHelper { + static Attributes fromMap(Map<String, Object> attributes) { + if (attributes == null || attributes.isEmpty()) { + return Attributes.empty(); + } + var builder = Attributes.builder(); + attributes.forEach((k, v) -> { + if (v instanceof String value) { + builder.put(k, value); + } else if (v instanceof Long value) { + builder.put(k, value); + } else if (v instanceof Double value) { + builder.put(k, value); + } else if (v instanceof Boolean value) { + builder.put(k, value); + } else { + throw new IllegalArgumentException("attributes do not support value type of [" + v.getClass().getCanonicalName() + "]"); + } + }); + return builder.build(); + } +}
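Editor's note: OtelHelper.fromMap is the single conversion point from the Elasticsearch-side Map<String, Object> attributes to otel Attributes, and it is strict about value types. A small same-package sketch of both the happy path and the failure mode; note in particular that a literal 3 autoboxes to Integer and is rejected, since only String, Long, Double and Boolean are handled:

```java
import io.opentelemetry.api.common.Attributes;

import java.util.Map;

public class AttributeConversionSketch {
    public static void main(String[] args) {
        Attributes ok = OtelHelper.fromMap(Map.of("index", "logs-2099", "shard", 3L, "primary", true));
        System.out.println(ok.size()); // 3

        try {
            OtelHelper.fromMap(Map.of("shard", 3)); // Integer value: unsupported
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```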
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 4f7380b14fa6d..b3d83a42ca2b4 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -150,7 +150,6 @@ APMServices createApmServices() { return AccessController.doPrivileged((PrivilegedAction<APMServices>) () -> { var openTelemetry = GlobalOpenTelemetry.get(); var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); - return new APMServices(tracer, openTelemetry); }); } @@ -452,4 +451,5 @@ private static Automaton patternsToAutomaton(List<String> patterns) { } return Operations.union(automata); } + } diff --git a/modules/apm/src/main/plugin-metadata/plugin-security.policy b/modules/apm/src/main/plugin-metadata/plugin-security.policy index b85d3ec05c277..57da3a2efd301 100644 --- a/modules/apm/src/main/plugin-metadata/plugin-security.policy +++ b/modules/apm/src/main/plugin-metadata/plugin-security.policy @@ -11,6 +11,8 @@ grant { permission java.lang.RuntimePermission "createClassLoader"; permission java.lang.RuntimePermission "getClassLoader"; permission java.util.PropertyPermission "elastic.apm.*", "write"; + permission java.util.PropertyPermission "*", "read,write"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; grant codeBase "${codebase.elastic-apm-agent}" { diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java new file mode 100644 index 0000000000000..1064b8820b089 --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; +import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.sameInstance; + +public class APMMeterTests extends ESTestCase { + Meter testOtel = OpenTelemetry.noop().getMeter("test"); + + Meter noopOtel = OpenTelemetry.noop().getMeter("noop"); + + public void testMeterIsSetUponConstruction() { + // test default + APMMeter apmMeter = new APMMeter(Settings.EMPTY, () -> testOtel, () -> noopOtel); + + Meter meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + + // test explicitly enabled + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(testOtel)); + + // test explicitly disabled + settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build(); + apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + } + + public void testMeterIsOverridden() { + APMMeter apmMeter = new APMMeter(Settings.EMPTY, () -> testOtel, () -> noopOtel); + + Meter meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + + apmMeter.setEnabled(true); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(testOtel)); + } + + public void testLookupByName() { + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + + var apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + + DoubleCounter registeredCounter = apmMeter.registerDoubleCounter("name", "desc", "unit"); + DoubleCounter lookedUpCounter = apmMeter.getDoubleCounter("name"); + + assertThat(lookedUpCounter, sameInstance(registeredCounter)); + } + + public void testNoopIsSetOnStop() { + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + APMMeter apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + apmMeter.start(); + + Meter meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(testOtel)); + + apmMeter.stop(); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + } + +}
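Editor's note, restating the lifecycle contract those tests pin down as a compact sketch (APMMeter and the setting key are introduced elsewhere in this change; the meters here are no-op placeholders):

```java
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.metrics.Meter;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.telemetry.apm.internal.APMAgentSettings;

public class APMMeterLifecycleSketch {
    public static void main(String[] args) {
        Meter otelMeter = OpenTelemetry.noop().getMeter("otel");
        Meter noopMeter = OpenTelemetry.noop().getMeter("noop");
        Settings settings = Settings.builder()
            .put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true)
            .build();

        APMMeter apmMeter = new APMMeter(settings, () -> otelMeter, () -> noopMeter);
        apmMeter.start(); // enabled: instruments delegate to the otel-backed meter
        apmMeter.stop();  // stopped: instruments fall back to the no-op meter
    }
}
```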
diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java new file mode 100644 index 0000000000000..51285894f27ee --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import io.opentelemetry.api.metrics.DoubleGaugeBuilder; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongCounterBuilder; +import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.ObservableLongCounter; +import io.opentelemetry.api.metrics.ObservableLongMeasurement; + +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.CountDownLatch; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; + +public class InstrumentsConcurrencyTests extends ESTestCase { + String name = "name"; + String description = "desc"; + String unit = "kg"; + Meter noopMeter = OpenTelemetry.noop().getMeter("noop"); + CountDownLatch registerLatch = new CountDownLatch(1); + Meter lockingMeter = new Meter() { + @Override + public LongCounterBuilder counterBuilder(String name) { + return new LockingLongCounterBuilder(); + } + + @Override + public LongUpDownCounterBuilder upDownCounterBuilder(String name) { + return null; + } + + @Override + public DoubleHistogramBuilder histogramBuilder(String name) { + return null; + } + + @Override + public DoubleGaugeBuilder gaugeBuilder(String name) { + return null; + } + }; + + class LockingLongCounterBuilder implements LongCounterBuilder { + + @Override + public LongCounterBuilder setDescription(String description) { + return this; + } + + @Override + public LongCounterBuilder setUnit(String unit) { + return this; + } + + @Override + public DoubleCounterBuilder ofDoubles() { + return null; + } + + @Override + public LongCounter build() { + try { + registerLatch.await(); + } catch (Exception e) { + throw new RuntimeException(e); + } + return null; + } + + @Override + public ObservableLongCounter buildWithCallback(Consumer<ObservableLongMeasurement> callback) { + return null; + } + } + + public void testLockingWhenRegistering() throws Exception { + Instruments instruments = new Instruments(lockingMeter); + + var registerThread = new Thread(() -> instruments.registerLongCounter(name, description, unit)); + // the register thread blocks on the countdown latch inside build(), simulating a long-running registration + registerThread.start(); + var setProviderThread = new Thread(() -> instruments.setProvider(noopMeter)); + // the setProvider thread attempts to swap the meter, but must first acquire the register lock + setProviderThread.start(); + + // assert that the setProvider thread is waiting for the lock during the long-running registration + assertBusy(() -> assertThat(setProviderThread.getState(), equalTo(Thread.State.WAITING))); + // assert that the old lockingMeter is still in place + assertBusy(() -> assertThat(instruments.getMeter(), sameInstance(lockingMeter))); + + // finish the long-running registration + registerLatch.countDown(); + // assert that the meter was overridden once the lock was released + assertBusy(() -> assertThat(instruments.getMeter(), sameInstance(noopMeter))); + } +} diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java
b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java new file mode 100644 index 0000000000000..daf511fcf7042 --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; + +public class InstrumentsTests extends ESTestCase { + Meter noopMeter = OpenTelemetry.noop().getMeter("noop"); + Meter someOtherMeter = OpenTelemetry.noop().getMeter("xyz"); + String name = "name"; + String description = "desc"; + String unit = "kg"; + + public void testRegistrationAndLookup() { + Instruments instruments = new Instruments(noopMeter); + { + var registered = instruments.registerDoubleCounter(name, description, unit); + var lookedUp = instruments.getDoubleCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerDoubleUpDownCounter(name, description, unit); + var lookedUp = instruments.getDoubleUpDownCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerDoubleGauge(name, description, unit); + var lookedUp = instruments.getDoubleGauge(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerDoubleHistogram(name, description, unit); + var lookedUp = instruments.getDoubleHistogram(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerLongCounter(name, description, unit); + var lookedUp = instruments.getLongCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerLongUpDownCounter(name, description, unit); + var lookedUp = instruments.getLongUpDownCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerLongGauge(name, description, unit); + var lookedUp = instruments.getLongGauge(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerLongHistogram(name, description, unit); + var lookedUp = instruments.getLongHistogram(name); + assertThat(registered, sameInstance(lookedUp)); + } + } + + public void testNameValidation() { + Instruments instruments = new Instruments(noopMeter); + + instruments.registerLongHistogram(name, description, unit); + var e = expectThrows(IllegalStateException.class, () -> instruments.registerLongHistogram(name, description, unit)); + assertThat(e.getMessage(), equalTo("LongHistogramAdapter[name] already registered")); + } +} diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 384970bdc7ab9..bd4a449f640ce 100644 --- 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1320,11 +1320,17 @@ public void testGetDataStream() throws Exception { ).actionGet(); assertThat(response.getDataStreams().size(), is(1)); DataStreamInfo metricsFooDataStream = response.getDataStreams().get(0); - assertThat(metricsFooDataStream.getDataStream().getName(), is("metrics-foo")); + DataStream dataStream = metricsFooDataStream.getDataStream(); + assertThat(dataStream.getName(), is("metrics-foo")); assertThat(metricsFooDataStream.getDataStreamStatus(), is(ClusterHealthStatus.YELLOW)); assertThat(metricsFooDataStream.getIndexTemplate(), is("template_for_foo")); assertThat(metricsFooDataStream.getIlmPolicy(), is(nullValue())); - assertThat(metricsFooDataStream.getDataStream().getLifecycle(), is(lifecycle)); + assertThat(dataStream.getLifecycle(), is(lifecycle)); + assertThat(metricsFooDataStream.templatePreferIlmValue(), is(true)); + GetDataStreamAction.Response.IndexProperties indexProperties = metricsFooDataStream.getIndexSettingsValues() + .get(dataStream.getWriteIndex()); + assertThat(indexProperties.ilmPolicyName(), is(nullValue())); + assertThat(indexProperties.preferIlm(), is(true)); } private static void assertBackingIndex(String backingIndex, String timestampFieldPathInMapping, Map expectedMapping) { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java index 73af952af524d..de81ca9bef18c 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java @@ -11,6 +11,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.IndexProperties; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterState; @@ -21,6 +23,7 @@ import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -39,9 +42,12 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.index.IndexSettings.PREFER_ILM_SETTING; + public class GetDataStreamsTransportAction extends TransportMasterNodeReadAction< GetDataStreamAction.Request, GetDataStreamAction.Response> { @@ -95,6 +101,7 @@ static GetDataStreamAction.Response innerOperation( List dataStreamInfos = new ArrayList<>(dataStreams.size()); for (DataStream dataStream : dataStreams) { final String indexTemplate; + boolean indexTemplatePreferIlmValue = true; String 
ilmPolicyName = null; if (dataStream.isSystem()) { SystemDataStreamDescriptor dataStreamDescriptor = systemIndices.findMatchingDataStreamDescriptor(dataStream.getName()); @@ -104,13 +111,15 @@ static GetDataStreamAction.Response innerOperation( dataStreamDescriptor.getComposableIndexTemplate(), dataStreamDescriptor.getComponentTemplates() ); - ilmPolicyName = settings.get("index.lifecycle.name"); + ilmPolicyName = settings.get(IndexMetadata.LIFECYCLE_NAME); + indexTemplatePreferIlmValue = PREFER_ILM_SETTING.get(settings); } } else { indexTemplate = MetadataIndexTemplateService.findV2Template(state.metadata(), dataStream.getName(), false); if (indexTemplate != null) { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), indexTemplate); - ilmPolicyName = settings.get("index.lifecycle.name"); + ilmPolicyName = settings.get(IndexMetadata.LIFECYCLE_NAME); + indexTemplatePreferIlmValue = PREFER_ILM_SETTING.get(settings); } else { LOGGER.warn( "couldn't find any matching template for data stream [{}]. has it been restored (and possibly renamed)" @@ -125,18 +134,35 @@ static GetDataStreamAction.Response innerOperation( dataStream.getIndices().stream().map(Index::getName).toArray(String[]::new) ); + Map backingIndicesSettingsValues = new HashMap<>(); + Metadata metadata = state.getMetadata(); + for (Index index : dataStream.getIndices()) { + IndexMetadata indexMetadata = metadata.index(index); + Boolean preferIlm = PREFER_ILM_SETTING.get(indexMetadata.getSettings()); + assert preferIlm != null : "must use the default prefer ilm setting value, if nothing else"; + ManagedBy managedBy; + if (metadata.isIndexManagedByILM(indexMetadata)) { + managedBy = ManagedBy.ILM; + } else if (dataStream.isIndexManagedByDataStreamLifecycle(index, metadata::index)) { + managedBy = ManagedBy.LIFECYCLE; + } else { + managedBy = ManagedBy.UNMANAGED; + } + backingIndicesSettingsValues.put(index, new IndexProperties(preferIlm, indexMetadata.getLifecyclePolicyName(), managedBy)); + } + GetDataStreamAction.Response.TimeSeries timeSeries = null; if (dataStream.getIndexMode() == IndexMode.TIME_SERIES) { List> ranges = new ArrayList<>(); Tuple current = null; String previousIndexName = null; for (Index index : dataStream.getIndices()) { - IndexMetadata metadata = state.getMetadata().index(index); - if (metadata.getIndexMode() != IndexMode.TIME_SERIES) { + IndexMetadata indexMetadata = metadata.index(index); + if (indexMetadata.getIndexMode() != IndexMode.TIME_SERIES) { continue; } - Instant start = metadata.getTimeSeriesStart(); - Instant end = metadata.getTimeSeriesEnd(); + Instant start = indexMetadata.getTimeSeriesStart(); + Instant end = indexMetadata.getTimeSeriesEnd(); if (current == null) { current = new Tuple<>(start, end); } else if (current.v2().compareTo(start) == 0) { @@ -175,7 +201,9 @@ static GetDataStreamAction.Response innerOperation( streamHealth.getStatus(), indexTemplate, ilmPolicyName, - timeSeries + timeSeries, + backingIndicesSettingsValues, + indexTemplatePreferIlmValue ) ); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 469c72e539c45..12e1604d10c1f 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -8,15 +8,33 @@ 
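Editor's note: the per-index resolution above reads in a fixed order, active ILM management wins, then the data stream lifecycle, and only then is the index reported as unmanaged. Distilled into a standalone function for illustration (a restatement, not code from the change):

```java
public class ManagedBySketch {
    enum ManagedBy { ILM, LIFECYCLE, UNMANAGED }

    // Mirrors the decision order in GetDataStreamsTransportAction#innerOperation.
    static ManagedBy resolve(boolean managedByIlm, boolean managedByDataStreamLifecycle) {
        if (managedByIlm) {
            return ManagedBy.ILM;       // ILM actively manages the index
        }
        if (managedByDataStreamLifecycle) {
            return ManagedBy.LIFECYCLE; // otherwise the data stream lifecycle manages it
        }
        return ManagedBy.UNMANAGED;     // neither system manages it
    }
}
```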
package org.elasticsearch.datastreams.action; import org.elasticsearch.action.datastreams.GetDataStreamAction.Response; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; import java.time.Instant; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; + +import static org.elasticsearch.cluster.metadata.DataStream.getDefaultBackingIndexName; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class GetDataStreamsResponseTests extends AbstractWireSerializingTestCase { @@ -43,13 +61,198 @@ protected Response mutateInstance(Response instance) { return new Response(instance.getDataStreams().stream().map(this::mutateInstance).toList()); } + @SuppressWarnings("unchecked") + public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Exception { + // we'll test a data stream with 3 backing indices - two managed by ILM (having the ILM policy configured for them) + // and one without any ILM policy configured + String dataStreamName = "logs"; + + Index firstGenerationIndex = new Index(getDefaultBackingIndexName(dataStreamName, 1), UUIDs.base64UUID()); + Index secondGenerationIndex = new Index(getDefaultBackingIndexName(dataStreamName, 2), UUIDs.base64UUID()); + Index writeIndex = new Index(getDefaultBackingIndexName(dataStreamName, 3), UUIDs.base64UUID()); + List indices = List.of(firstGenerationIndex, secondGenerationIndex, writeIndex); + { + // data stream has an enabled lifecycle + DataStream logs = new DataStream( + "logs", + indices, + 3, + null, + false, + false, + false, + true, + IndexMode.STANDARD, + new DataStreamLifecycle() + ); + + String ilmPolicyName = "rollover-30days"; + Map indexSettingsValues = Map.of( + firstGenerationIndex, + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + secondGenerationIndex, + new Response.IndexProperties(false, ilmPolicyName, ManagedBy.LIFECYCLE), + writeIndex, + new Response.IndexProperties(false, null, ManagedBy.LIFECYCLE) + ); + + Response.DataStreamInfo dataStreamInfo = new Response.DataStreamInfo( + logs, + ClusterHealthStatus.GREEN, + "index-template", + null, + null, + indexSettingsValues, + false + ); + Response response = new Response(List.of(dataStreamInfo)); + XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); + response.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); + + BytesReference bytes = BytesReference.bytes(contentBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { + Map map = parser.map(); + List dataStreams = (List) map.get(Response.DATA_STREAMS_FIELD.getPreferredName()); + assertThat(dataStreams.size(), is(1)); + 
Map dataStreamMap = (Map) dataStreams.get(0); + assertThat(dataStreamMap.get(DataStream.NAME_FIELD.getPreferredName()), is(dataStreamName)); + + assertThat(dataStreamMap.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(dataStreamMap.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat(dataStreamMap.get(Response.DataStreamInfo.LIFECYCLE_FIELD.getPreferredName()), is(Map.of("enabled", true))); + assertThat( + dataStreamMap.get(Response.DataStreamInfo.NEXT_GENERATION_INDEX_MANAGED_BY.getPreferredName()), + is(ManagedBy.LIFECYCLE.displayValue) + ); + + List indicesRepresentation = (List) dataStreamMap.get(DataStream.INDICES_FIELD.getPreferredName()); + Map firstGenIndexRepresentation = (Map) indicesRepresentation.get(0); + assertThat(firstGenIndexRepresentation.get("index_name"), is(firstGenerationIndex.getName())); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(true)); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(ilmPolicyName)); + assertThat( + firstGenIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.ILM.displayValue) + ); + + Map secondGenIndexRepresentation = (Map) indicesRepresentation.get(1); + assertThat(secondGenIndexRepresentation.get("index_name"), is(secondGenerationIndex.getName())); + assertThat(secondGenIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat( + secondGenIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), + is(ilmPolicyName) + ); + assertThat( + secondGenIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.LIFECYCLE.displayValue) + ); + + // the write index is managed by data stream lifecycle + Map writeIndexRepresentation = (Map) indicesRepresentation.get(2); + assertThat(writeIndexRepresentation.get("index_name"), is(writeIndex.getName())); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat( + writeIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.LIFECYCLE.displayValue) + ); + } + } + + { + // data stream has a lifecycle that's not enabled + DataStream logs = new DataStream( + "logs", + indices, + 3, + null, + false, + false, + false, + true, + IndexMode.STANDARD, + new DataStreamLifecycle(null, null, false) + ); + + String ilmPolicyName = "rollover-30days"; + Map indexSettingsValues = Map.of( + firstGenerationIndex, + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + secondGenerationIndex, + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + writeIndex, + new Response.IndexProperties(false, null, ManagedBy.UNMANAGED) + ); + + Response.DataStreamInfo dataStreamInfo = new Response.DataStreamInfo( + logs, + ClusterHealthStatus.GREEN, + "index-template", + null, + null, + indexSettingsValues, + false + ); + Response response = new Response(List.of(dataStreamInfo)); + XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); + response.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); + + BytesReference bytes = BytesReference.bytes(contentBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, 
bytes)) { + Map<String, Object> map = parser.map(); + List<Object> dataStreams = (List<Object>) map.get(Response.DATA_STREAMS_FIELD.getPreferredName()); + assertThat(dataStreams.size(), is(1)); + Map<String, Object> dataStreamMap = (Map<String, Object>) dataStreams.get(0); + assertThat(dataStreamMap.get(DataStream.NAME_FIELD.getPreferredName()), is(dataStreamName)); + // note that the prefer_ilm value is displayed at the top level even if the template backing the data stream doesn't have a + // policy specified anymore + assertThat(dataStreamMap.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(dataStreamMap.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat(dataStreamMap.get(Response.DataStreamInfo.LIFECYCLE_FIELD.getPreferredName()), is(Map.of("enabled", false))); + assertThat( + dataStreamMap.get(Response.DataStreamInfo.NEXT_GENERATION_INDEX_MANAGED_BY.getPreferredName()), + is(ManagedBy.UNMANAGED.displayValue) + ); + + List<Object> indicesRepresentation = (List<Object>) dataStreamMap.get(DataStream.INDICES_FIELD.getPreferredName()); + Map<String, Object> firstGenIndexRepresentation = (Map<String, Object>) indicesRepresentation.get(0); + assertThat(firstGenIndexRepresentation.get("index_name"), is(firstGenerationIndex.getName())); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(true)); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(ilmPolicyName)); + assertThat( + firstGenIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.ILM.displayValue) + ); + + // the write index has no ILM policy and a disabled lifecycle, so it is unmanaged in this scenario + Map<String, Object> writeIndexRepresentation = (Map<String, Object>) indicesRepresentation.get(2); + assertThat(writeIndexRepresentation.get("index_name"), is(writeIndex.getName())); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat( + writeIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.UNMANAGED.displayValue) + ); + } + } + } + + public void testManagedByDisplayValuesDontAccidentallyChange() { + // the UI might derive logic from the display values, so any changes should be coordinated with the UI team + assertThat(ManagedBy.ILM.displayValue, is("Index Lifecycle Management")); + assertThat(ManagedBy.LIFECYCLE.displayValue, is("Data stream lifecycle")); + assertThat(ManagedBy.UNMANAGED.displayValue, is("Unmanaged")); + } + private Response.DataStreamInfo mutateInstance(Response.DataStreamInfo instance) { var dataStream = instance.getDataStream(); var status = instance.getDataStreamStatus(); var indexTemplate = instance.getIndexTemplate(); var ilmPolicyName = instance.getIlmPolicy(); var timeSeries = instance.getTimeSeries(); - switch (randomIntBetween(0, 4)) { + var indexSettings = instance.getIndexSettingsValues(); + var templatePreferIlm = instance.templatePreferIlmValue(); + switch (randomIntBetween(0, 6)) { case 0 -> dataStream = randomValueOtherThan(dataStream, DataStreamTestHelper::randomInstance); case 1 -> status = randomValueOtherThan(status, () -> randomFrom(ClusterHealthStatus.values())); case 2 -> indexTemplate = randomBoolean() && indexTemplate != null ?
null : randomAlphaOfLengthBetween(2, 10); @@ -57,8 +260,22 @@ private Response.DataStreamInfo mutateInstance(Response.DataStreamInfo instance) case 4 -> timeSeries = randomBoolean() && timeSeries != null ? null : randomValueOtherThan(timeSeries, () -> new Response.TimeSeries(generateRandomTimeSeries())); + case 5 -> indexSettings = randomValueOtherThan( + indexSettings, + () -> randomBoolean() + ? Map.of() + : Map.of( + new Index(randomAlphaOfLengthBetween(50, 100), UUIDs.base64UUID()), + new Response.IndexProperties( + randomBoolean(), + randomAlphaOfLengthBetween(50, 100), + randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE + ) + ) + ); + case 6 -> templatePreferIlm = templatePreferIlm ? false : true; } - return new Response.DataStreamInfo(dataStream, status, indexTemplate, ilmPolicyName, timeSeries); + return new Response.DataStreamInfo(dataStream, status, indexTemplate, ilmPolicyName, timeSeries, indexSettings, templatePreferIlm); } private List> generateRandomTimeSeries() { @@ -70,6 +287,21 @@ private List> generateRandomTimeSeries() { return timeSeries; } + private Map generateRandomIndexSettingsValues() { + Map values = new HashMap<>(); + for (int i = 0; i < randomIntBetween(0, 3); i++) { + values.put( + new Index(randomAlphaOfLengthBetween(50, 100), UUIDs.base64UUID()), + new Response.IndexProperties( + randomBoolean(), + randomAlphaOfLengthBetween(50, 100), + randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE + ) + ); + } + return values; + } + private Response.DataStreamInfo generateRandomDataStreamInfo() { List> timeSeries = randomBoolean() ? generateRandomTimeSeries() : null; return new Response.DataStreamInfo( @@ -77,7 +309,9 @@ private Response.DataStreamInfo generateRandomDataStreamInfo() { ClusterHealthStatus.GREEN, randomAlphaOfLengthBetween(2, 10), randomAlphaOfLengthBetween(2, 10), - timeSeries != null ? new Response.TimeSeries(timeSeries) : null + timeSeries != null ? 
new Response.TimeSeries(timeSeries) : null, + generateRandomIndexSettingsValues(), + randomBoolean() ); } } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 50c8e2c74dc74..09cec438d10cc 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -311,6 +311,77 @@ setup: name: simple-data-stream2 - is_true: acknowledged +--- +"Get data stream and check DSL and ILM information": + - skip: + version: " - 8.10.99" + reason: "data streams DSL and ILM mixing information available in 8.11+" + + - do: + allowed_warnings: + - "index template [mixing-dsl-template] has index patterns [mixing-dsl-stream] matching patterns from existing older templates + [global] with patterns (global => [*]); this template [mixing-dsl-template] will take precedence during new index creation" + indices.put_index_template: + name: mixing-dsl-template + body: + index_patterns: [mixing-dsl-stream] + template: + mappings: + properties: + '@timestamp': + type: date_nanos + lifecycle: + data_retention: "30d" + enabled: false + settings: + index.lifecycle.prefer_ilm: false + index.lifecycle.name: "missing_ilm_policy" + data_stream: {} + + - do: + indices.create_data_stream: + name: mixing-dsl-stream + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: mixing-dsl-stream + - match: { data_streams.0.name: mixing-dsl-stream } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - match: { data_streams.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.prefer_ilm: false } + - match: { data_streams.0.next_generation_managed_by: "Index Lifecycle Management" } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.prefer_ilm: false } + - match: { data_streams.0.indices.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.indices.0.managed_by: "Index Lifecycle Management" } + + - do: + indices.put_data_lifecycle: + name: "*" + body: > + { + "data_retention": "30d", + "enabled": true + } + + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: mixing-dsl-stream + - match: { data_streams.0.name: mixing-dsl-stream } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - match: { data_streams.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.prefer_ilm: false } + - match: { data_streams.0.next_generation_managed_by: "Data stream lifecycle" } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.prefer_ilm: false } + - match: { data_streams.0.indices.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.indices.0.managed_by: "Data stream lifecycle" } + --- "Delete data stream with backing indices": - skip: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml index 3644e93d12bbd..27aa0e6e9a20b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml @@ -145,8 +145,8 @@ "Sparse vector in 7.x": - 
skip: features: allowed_warnings - version: "8.0.0 - " - reason: "sparse_vector field type supported in 7.x" + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/100003" - do: allowed_warnings: - "The [sparse_vector] field type is deprecated and will be removed in 8.0." diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 99ce5910c9775..1a082e7558577 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -385,6 +385,7 @@ org.elasticsearch.serverless.apifiltering; exports org.elasticsearch.telemetry.tracing; exports org.elasticsearch.telemetry; + exports org.elasticsearch.telemetry.metric; provides java.util.spi.CalendarDataProvider with org.elasticsearch.common.time.IsoCalendarDataProvider; provides org.elasticsearch.xcontent.ErrorOnUnknown with org.elasticsearch.common.xcontent.SuggestingErrorOnUnknown; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 29e42630ec23e..657ab0441b01d 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -145,8 +145,10 @@ static TransportVersion def(int id) { public static final TransportVersion WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED = def(8_502_00_0); public static final TransportVersion RECOVERY_COMMIT_TOO_NEW_EXCEPTION_ADDED = def(8_503_00_0); public static final TransportVersion NODE_INFO_COMPONENT_VERSIONS_ADDED = def(8_504_00_0); + public static final TransportVersion COMPACT_FIELD_CAPS_ADDED = def(8_505_00_0); + public static final TransportVersion DATA_STREAM_RESPONSE_INDEX_PROPERTIES = def(8_506_00_0); + public static final TransportVersion ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED = def(8_507_00_0); public static final TransportVersion NESTED_KNN_VECTOR_QUERY_V = def(8_599_00_0); - /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index aa69ede54dea1..9c1fb63a6b8d0 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -32,8 +33,11 @@ import java.time.Instant; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; +import static org.elasticsearch.TransportVersions.DATA_STREAM_RESPONSE_INDEX_PROPERTIES; + public class GetDataStreamAction extends ActionType { public static final GetDataStreamAction INSTANCE = new GetDataStreamAction(); @@ -142,12 +146,28 @@ public Request includeDefaults(boolean includeDefaults) { } public static class Response extends ActionResponse implements ToXContentObject { + + public enum ManagedBy { + ILM("Index Lifecycle Management"), + LIFECYCLE("Data stream lifecycle"), + UNMANAGED("Unmanaged"); + + public final String displayValue; + + ManagedBy(String displayValue) { + this.displayValue = displayValue; + } + } + public static final ParseField DATA_STREAMS_FIELD = new ParseField("data_streams"); public static class DataStreamInfo implements SimpleDiffable, ToXContentObject { public static final ParseField STATUS_FIELD = new ParseField("status"); public static final ParseField INDEX_TEMPLATE_FIELD = new ParseField("template"); + public static final ParseField PREFER_ILM = new ParseField("prefer_ilm"); + public static final ParseField MANAGED_BY = new ParseField("managed_by"); + public static final ParseField NEXT_GENERATION_INDEX_MANAGED_BY = new ParseField("next_generation_managed_by"); public static final ParseField ILM_POLICY_FIELD = new ParseField("ilm_policy"); public static final ParseField LIFECYCLE_FIELD = new ParseField("lifecycle"); public static final ParseField HIDDEN_FIELD = new ParseField("hidden"); @@ -167,28 +187,39 @@ public static class DataStreamInfo implements SimpleDiffable, To private final String ilmPolicyName; @Nullable private final TimeSeries timeSeries; + private final Map indexSettingsValues; + private final boolean templatePreferIlmValue; public DataStreamInfo( DataStream dataStream, ClusterHealthStatus dataStreamStatus, @Nullable String indexTemplate, @Nullable String ilmPolicyName, - @Nullable TimeSeries timeSeries + @Nullable TimeSeries timeSeries, + Map indexSettingsValues, + boolean templatePreferIlmValue ) { this.dataStream = dataStream; this.dataStreamStatus = dataStreamStatus; this.indexTemplate = indexTemplate; this.ilmPolicyName = ilmPolicyName; this.timeSeries = timeSeries; + this.indexSettingsValues = indexSettingsValues; + this.templatePreferIlmValue = templatePreferIlmValue; } + @SuppressWarnings("unchecked") DataStreamInfo(StreamInput in) throws IOException { this( new DataStream(in), ClusterHealthStatus.readFrom(in), in.readOptionalString(), in.readOptionalString(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0) ? 
in.readOptionalWriteable(TimeSeries::new) : null + in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0) ? in.readOptionalWriteable(TimeSeries::new) : null, + in.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES) + ? in.readMap(Index::new, IndexProperties::new) + : Map.of(), + in.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES) ? in.readBoolean() : true ); } @@ -215,6 +246,14 @@ public TimeSeries getTimeSeries() { return timeSeries; } + public Map getIndexSettingsValues() { + return indexSettingsValues; + } + + public boolean templatePreferIlmValue() { + return templatePreferIlmValue; + } + @Override public void writeTo(StreamOutput out) throws IOException { dataStream.writeTo(out); @@ -224,6 +263,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0)) { out.writeOptionalWriteable(timeSeries); } + if (out.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES)) { + out.writeMap(indexSettingsValues); + out.writeBoolean(templatePreferIlmValue); + } } @Override @@ -242,7 +285,27 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla .startObject() .field(DataStream.NAME_FIELD.getPreferredName(), DataStream.TIMESTAMP_FIELD_NAME) .endObject(); - builder.xContentList(DataStream.INDICES_FIELD.getPreferredName(), dataStream.getIndices()); + + builder.field(DataStream.INDICES_FIELD.getPreferredName()); + if (dataStream.getIndices() == null) { + builder.nullValue(); + } else { + builder.startArray(); + for (Index index : dataStream.getIndices()) { + builder.startObject(); + index.toXContentFragment(builder); + IndexProperties indexProperties = indexSettingsValues.get(index); + if (indexProperties != null) { + builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); + if (indexProperties.ilmPolicyName() != null) { + builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); + } + builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); + } + builder.endObject(); + } + builder.endArray(); + } builder.field(DataStream.GENERATION_FIELD.getPreferredName(), dataStream.getGeneration()); if (dataStream.getMetadata() != null) { builder.field(DataStream.METADATA_FIELD.getPreferredName(), dataStream.getMetadata()); @@ -258,6 +321,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla if (ilmPolicyName != null) { builder.field(ILM_POLICY_FIELD.getPreferredName(), ilmPolicyName); } + builder.field(NEXT_GENERATION_INDEX_MANAGED_BY.getPreferredName(), getNextGenerationManagedBy().displayValue); + builder.field(PREFER_ILM.getPreferredName(), templatePreferIlmValue); builder.field(HIDDEN_FIELD.getPreferredName(), dataStream.isHidden()); builder.field(SYSTEM_FIELD.getPreferredName(), dataStream.isSystem()); builder.field(ALLOW_CUSTOM_ROUTING.getPreferredName(), dataStream.isAllowCustomRouting()); @@ -280,21 +345,55 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla return builder; } + /** + * Computes and returns which system will manage the next generation for this data stream. + */ + public ManagedBy getNextGenerationManagedBy() { + // both ILM and DSL are configured so let's check the prefer_ilm setting to see which system takes precedence + if (ilmPolicyName != null && dataStream.getLifecycle() != null && dataStream.getLifecycle().isEnabled()) { + return templatePreferIlmValue ? 
ManagedBy.ILM : ManagedBy.LIFECYCLE; + } + + if (ilmPolicyName != null) { + return ManagedBy.ILM; + } + + if (dataStream.getLifecycle() != null && dataStream.getLifecycle().isEnabled()) { + return ManagedBy.LIFECYCLE; + } + + return ManagedBy.UNMANAGED; + } + @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } DataStreamInfo that = (DataStreamInfo) o; - return dataStream.equals(that.dataStream) + return templatePreferIlmValue == that.templatePreferIlmValue + && Objects.equals(dataStream, that.dataStream) && dataStreamStatus == that.dataStreamStatus && Objects.equals(indexTemplate, that.indexTemplate) && Objects.equals(ilmPolicyName, that.ilmPolicyName) - && Objects.equals(timeSeries, that.timeSeries); + && Objects.equals(timeSeries, that.timeSeries) + && Objects.equals(indexSettingsValues, that.indexSettingsValues); } @Override public int hashCode() { - return Objects.hash(dataStream, dataStreamStatus, indexTemplate, ilmPolicyName, timeSeries); + return Objects.hash( + dataStream, + dataStreamStatus, + indexTemplate, + ilmPolicyName, + timeSeries, + indexSettingsValues, + templatePreferIlmValue + ); } } @@ -326,6 +425,23 @@ public int hashCode() { } } + /** + * Encapsulates the configured properties we want to display for each backing index. + * They'll usually be settings values, but could also be additional properties derived from settings. + */ + public record IndexProperties(boolean preferIlm, @Nullable String ilmPolicyName, ManagedBy managedBy) implements Writeable { + public IndexProperties(StreamInput in) throws IOException { + this(in.readBoolean(), in.readOptionalString(), in.readEnum(ManagedBy.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(preferIlm); + out.writeOptionalString(ilmPolicyName); + out.writeEnum(managedBy); + } + } + private final List dataStreams; @Nullable private final RolloverConfiguration rolloverConfiguration; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 7dc73940ce2ff..969d86f5f470c 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -178,7 +178,7 @@ static Map retrieveFieldCaps( false, false, null, - Collections.emptyMap() + Map.of() ); responseMap.put(parentField, fieldCap); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java index aac322c4a1de7..06ea2dee17481 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java @@ -13,15 +13,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Nullable; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; 
import java.util.Objects; -import java.util.function.Predicate; -import java.util.stream.Collectors; -import java.util.stream.Stream; final class FieldCapabilitiesIndexResponse implements Writeable { private static final TransportVersion MAPPING_HASH_VERSION = TransportVersions.V_8_2_0; @@ -48,7 +49,7 @@ final class FieldCapabilitiesIndexResponse implements Writeable { FieldCapabilitiesIndexResponse(StreamInput in) throws IOException { this.indexName = in.readString(); - this.responseMap = in.readMap(IndexFieldCapabilities::new); + this.responseMap = in.readMap(IndexFieldCapabilities::readFrom); this.canMatch = in.readBoolean(); this.originVersion = in.getTransportVersion(); if (in.getTransportVersion().onOrAfter(MAPPING_HASH_VERSION)) { @@ -68,32 +69,57 @@ public void writeTo(StreamOutput out) throws IOException { } } - private record GroupByMappingHash(List indices, String indexMappingHash, Map responseMap) - implements - Writeable { - GroupByMappingHash(StreamInput in) throws IOException { - this(in.readStringCollectionAsList(), in.readString(), in.readMap(IndexFieldCapabilities::new)); - } + private record CompressedGroup(String[] indices, String mappingHash, int[] fields) {} - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeStringCollection(indices); - out.writeString(indexMappingHash); - out.writeMap(responseMap, StreamOutput::writeWriteable); + static List readList(StreamInput input) throws IOException { + if (input.getTransportVersion().before(MAPPING_HASH_VERSION)) { + return input.readCollectionAsList(FieldCapabilitiesIndexResponse::new); + } + final int ungrouped = input.readVInt(); + final ArrayList responses = new ArrayList<>(ungrouped); + for (int i = 0; i < ungrouped; i++) { + responses.add(new FieldCapabilitiesIndexResponse(input)); + } + final int groups = input.readVInt(); + if (input.getTransportVersion().onOrAfter(TransportVersions.COMPACT_FIELD_CAPS_ADDED)) { + collectCompressedResponses(input, groups, responses); + } else { + collectResponsesLegacyFormat(input, groups, responses); } + return responses; + } - Stream getResponses() { - return indices.stream().map(index -> new FieldCapabilitiesIndexResponse(index, indexMappingHash, responseMap, true)); + private static void collectCompressedResponses(StreamInput input, int groups, ArrayList responses) + throws IOException { + final CompressedGroup[] compressedGroups = new CompressedGroup[groups]; + for (int i = 0; i < groups; i++) { + final String[] indices = input.readStringArray(); + final String mappingHash = input.readString(); + compressedGroups[i] = new CompressedGroup(indices, mappingHash, input.readIntArray()); + } + final IndexFieldCapabilities[] ifcLookup = input.readArray(IndexFieldCapabilities::readFrom, IndexFieldCapabilities[]::new); + for (CompressedGroup compressedGroup : compressedGroups) { + final Map ifc = Maps.newMapWithExpectedSize(compressedGroup.fields.length); + for (int i : compressedGroup.fields) { + var val = ifcLookup[i]; + ifc.put(val.name(), val); + } + for (String index : compressedGroup.indices) { + responses.add(new FieldCapabilitiesIndexResponse(index, compressedGroup.mappingHash, ifc, true)); + } } } - static List readList(StreamInput input) throws IOException { - if (input.getTransportVersion().before(MAPPING_HASH_VERSION)) { - return input.readCollectionAsList(FieldCapabilitiesIndexResponse::new); + private static void collectResponsesLegacyFormat(StreamInput input, int groups, ArrayList responses) + throws IOException { + for (int i = 0; i < groups; 
i++) { + final List indices = input.readStringCollectionAsList(); + final String mappingHash = input.readString(); + final Map ifc = input.readMap(IndexFieldCapabilities::readFrom); + for (String index : indices) { + responses.add(new FieldCapabilitiesIndexResponse(index, mappingHash, ifc, true)); + } } - final List ungroupedList = input.readCollectionAsList(FieldCapabilitiesIndexResponse::new); - final List groups = input.readCollectionAsList(GroupByMappingHash::new); - return Stream.concat(ungroupedList.stream(), groups.stream().flatMap(GroupByMappingHash::getResponses)).toList(); } static void writeList(StreamOutput output, List responses) throws IOException { @@ -101,22 +127,54 @@ static void writeList(StreamOutput output, List output.writeCollection(responses); return; } - final Predicate canGroup = r -> r.canMatch && r.indexMappingHash != null; - final List ungroupedResponses = responses.stream().filter(r -> canGroup.test(r) == false).toList(); - final List groupedResponses = responses.stream() - .filter(canGroup) - .collect(Collectors.groupingBy(r -> r.indexMappingHash)) - .values() - .stream() - .map(rs -> { - final String indexMappingHash = rs.get(0).indexMappingHash; - final Map responseMap = rs.get(0).responseMap; - final List indices = rs.stream().map(r -> r.indexName).toList(); - return new GroupByMappingHash(indices, indexMappingHash, responseMap); - }) - .toList(); + + Map> groupedResponsesMap = new HashMap<>(); + final List ungroupedResponses = new ArrayList<>(); + for (FieldCapabilitiesIndexResponse r : responses) { + if (r.canMatch && r.indexMappingHash != null) { + groupedResponsesMap.computeIfAbsent(r.indexMappingHash, k -> new ArrayList<>()).add(r); + } else { + ungroupedResponses.add(r); + } + } + output.writeCollection(ungroupedResponses); - output.writeCollection(groupedResponses); + if (output.getTransportVersion().onOrAfter(TransportVersions.COMPACT_FIELD_CAPS_ADDED)) { + writeCompressedResponses(output, groupedResponsesMap); + } else { + writeResponsesLegacyFormat(output, groupedResponsesMap); + } + } + + private static void writeResponsesLegacyFormat( + StreamOutput output, + Map> groupedResponsesMap + ) throws IOException { + output.writeCollection(groupedResponsesMap.values(), (o, fieldCapabilitiesIndexResponses) -> { + o.writeCollection(fieldCapabilitiesIndexResponses, (oo, r) -> oo.writeString(r.indexName)); + var first = fieldCapabilitiesIndexResponses.get(0); + o.writeString(first.indexMappingHash); + o.writeMap(first.responseMap, StreamOutput::writeWriteable); + }); + } + + private static void writeCompressedResponses(StreamOutput output, Map> groupedResponsesMap) + throws IOException { + final Map fieldDedupMap = new LinkedHashMap<>(); + output.writeCollection(groupedResponsesMap.values(), (o, fieldCapabilitiesIndexResponses) -> { + o.writeCollection(fieldCapabilitiesIndexResponses, (oo, r) -> oo.writeString(r.indexName)); + var first = fieldCapabilitiesIndexResponses.get(0); + o.writeString(first.indexMappingHash); + o.writeVInt(first.responseMap.size()); + for (IndexFieldCapabilities ifc : first.responseMap.values()) { + Integer offset = fieldDedupMap.size(); + final Integer found = fieldDedupMap.putIfAbsent(ifc, offset); + o.writeInt(found == null ? 
offset : found); + } + }); + // this is a linked hash map so the key-set is written in insertion order, so we can just write it out in order and then read it + // back as an array of FieldCapabilitiesIndexResponse in #collectCompressedResponses to use as a lookup + output.writeCollection(fieldDedupMap.keySet()); } /** @@ -145,14 +203,6 @@ public Map get() { return responseMap; } - /** - * - * Get the field capabilities for the provided {@code field} - */ - public IndexFieldCapabilities getField(String field) { - return responseMap.get(field); - } - TransportVersion getOriginVersion() { return originVersion; } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java index 57a9dd049d26c..de2f6965e011d 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java @@ -17,65 +17,54 @@ import java.io.IOException; import java.util.Map; -import java.util.Objects; /** * Describes the capabilities of a field in a single index. + * @param name The name of the field. + * @param type The type associated with the field. + * @param isSearchable Whether this field is indexed for search. + * @param isAggregatable Whether this field can be aggregated on. + * @param meta Metadata about the field. */ -public class IndexFieldCapabilities implements Writeable { - private static final StringLiteralDeduplicator typeStringDeduplicator = new StringLiteralDeduplicator(); - - private final String name; - private final String type; - private final boolean isMetadatafield; - private final boolean isSearchable; - private final boolean isAggregatable; - private final boolean isDimension; - private final TimeSeriesParams.MetricType metricType; - private final Map meta; +public record IndexFieldCapabilities( + String name, + String type, + boolean isMetadatafield, + boolean isSearchable, + boolean isAggregatable, + boolean isDimension, + TimeSeriesParams.MetricType metricType, + Map meta +) implements Writeable { - /** - * @param name The name of the field. - * @param type The type associated with the field. - * @param isSearchable Whether this field is indexed for search. - * @param isAggregatable Whether this field can be aggregated on. - * @param meta Metadata about the field. 
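The compact wire format introduced above is essentially a dictionary encoding: each mapping-hash group serializes integer offsets into a shared deduplication table, the table is a `LinkedHashMap` so insertion order is stable, and its key set is written once at the end for the reader to use as a lookup array. A self-contained sketch of the same round trip, using plain collections in place of the Elasticsearch stream types:

```java
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

final class DictionaryEncodingSketch {
    /** Encode each group's values as offsets into a shared, insertion-ordered table. */
    static List<int[]> encode(List<List<String>> groups, List<String> tableOut) {
        Map<String, Integer> dedup = new LinkedHashMap<>();
        List<int[]> encoded = new ArrayList<>();
        for (List<String> group : groups) {
            int[] offsets = new int[group.size()];
            for (int i = 0; i < group.size(); i++) {
                Integer offset = dedup.size();
                // putIfAbsent returns null the first time a value is seen,
                // so new values are assigned the next offset in the table
                Integer found = dedup.putIfAbsent(group.get(i), offset);
                offsets[i] = found == null ? offset : found;
            }
            encoded.add(offsets);
        }
        // insertion order is preserved, so the key set doubles as the lookup array
        tableOut.addAll(dedup.keySet());
        return encoded;
    }

    /** Decode: resolve each group's offsets against the shared table. */
    static List<List<String>> decode(List<int[]> encoded, List<String> table) {
        List<List<String>> groups = new ArrayList<>();
        for (int[] offsets : encoded) {
            List<String> group = new ArrayList<>(offsets.length);
            for (int o : offsets) {
                group.add(table.get(o));
            }
            groups.add(group);
        }
        return groups;
    }
}
```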
- */ - IndexFieldCapabilities( - String name, - String type, - boolean isMetadatafield, - boolean isSearchable, - boolean isAggregatable, - boolean isDimension, - TimeSeriesParams.MetricType metricType, - Map meta - ) { - this.name = name; - this.type = type; - this.isMetadatafield = isMetadatafield; - this.isSearchable = isSearchable; - this.isAggregatable = isAggregatable; - this.isDimension = isDimension; - this.metricType = metricType; - this.meta = meta; - } + private static final StringLiteralDeduplicator typeStringDeduplicator = new StringLiteralDeduplicator(); - IndexFieldCapabilities(StreamInput in) throws IOException { - this.name = in.readString(); - this.type = typeStringDeduplicator.deduplicate(in.readString()); - this.isMetadatafield = in.readBoolean(); - this.isSearchable = in.readBoolean(); - this.isAggregatable = in.readBoolean(); + public static IndexFieldCapabilities readFrom(StreamInput in) throws IOException { + String name = in.readString(); + String type = typeStringDeduplicator.deduplicate(in.readString()); + boolean isMetadatafield = in.readBoolean(); + boolean isSearchable = in.readBoolean(); + boolean isAggregatable = in.readBoolean(); + boolean isDimension; + TimeSeriesParams.MetricType metricType; if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { - this.isDimension = in.readBoolean(); - this.metricType = in.readOptionalEnum(TimeSeriesParams.MetricType.class); + isDimension = in.readBoolean(); + metricType = in.readOptionalEnum(TimeSeriesParams.MetricType.class); } else { - this.isDimension = false; - this.metricType = null; + isDimension = false; + metricType = null; } - this.meta = in.readMap(StreamInput::readString); + return new IndexFieldCapabilities( + name, + type, + isMetadatafield, + isSearchable, + isAggregatable, + isDimension, + metricType, + in.readImmutableMap(StreamInput::readString) + ); } @Override @@ -92,55 +81,4 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(meta, StreamOutput::writeString); } - public String getName() { - return name; - } - - public String getType() { - return type; - } - - public boolean isMetadatafield() { - return isMetadatafield; - } - - public boolean isAggregatable() { - return isAggregatable; - } - - public boolean isSearchable() { - return isSearchable; - } - - public boolean isDimension() { - return isDimension; - } - - public TimeSeriesParams.MetricType getMetricType() { - return metricType; - } - - public Map meta() { - return meta; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - IndexFieldCapabilities that = (IndexFieldCapabilities) o; - return isMetadatafield == that.isMetadatafield - && isSearchable == that.isSearchable - && isAggregatable == that.isAggregatable - && isDimension == that.isDimension - && Objects.equals(metricType, that.metricType) - && Objects.equals(name, that.name) - && Objects.equals(type, that.type) - && Objects.equals(meta, that.meta); - } - - @Override - public int hashCode() { - return Objects.hash(name, type, isMetadatafield, isSearchable, isAggregatable, isDimension, metricType, meta); - } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java index 37313c435319c..d39dd28b32611 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java +++ 
b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java @@ -54,11 +54,11 @@ private static Function buildTra Set nestedObjects = null; if (allowedTypes.length > 0) { Set at = Set.of(allowedTypes); - test = test.and(ifc -> at.contains(ifc.getType())); + test = test.and(ifc -> at.contains(ifc.type())); } for (String filter : filters) { if ("-parent".equals(filter)) { - test = test.and(fc -> fc.getType().equals("nested") == false && fc.getType().equals("object") == false); + test = test.and(fc -> fc.type().equals("nested") == false && fc.type().equals("object") == false); } if ("-metadata".equals(filter)) { test = test.and(fc -> fc.isMetadatafield() == false); @@ -71,7 +71,7 @@ private static Function buildTra nestedObjects = findTypes("nested", input); } Set no = nestedObjects; - test = test.and(fc -> isNestedField(fc.getName(), no) == false); + test = test.and(fc -> isNestedField(fc.name(), no) == false); } if ("-multifield".equals(filter)) { // immediate parent is not an object field @@ -79,7 +79,7 @@ private static Function buildTra objects = findTypes("object", input); } Set o = objects; - test = test.and(fc -> isNotMultifield(fc.getName(), o)); + test = test.and(fc -> isNotMultifield(fc.name(), o)); } } Predicate finalTest = test; @@ -94,7 +94,7 @@ private static Function buildTra private static Set findTypes(String type, Map fieldCaps) { return fieldCaps.entrySet() .stream() - .filter(entry -> type.equals(entry.getValue().getType())) + .filter(entry -> type.equals(entry.getValue().type())) .map(Map.Entry::getKey) .collect(Collectors.toSet()); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 91c4f10956866..c9a44c14106ee 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -417,17 +417,14 @@ private static void innerMerge( final String field = entry.getKey(); final IndexFieldCapabilities fieldCap = entry.getValue(); Map typeMap = responseMapBuilder.computeIfAbsent(field, f -> new HashMap<>()); - FieldCapabilities.Builder builder = typeMap.computeIfAbsent( - fieldCap.getType(), - key -> new FieldCapabilities.Builder(field, key) - ); + FieldCapabilities.Builder builder = typeMap.computeIfAbsent(fieldCap.type(), key -> new FieldCapabilities.Builder(field, key)); builder.add( indices, fieldCap.isMetadatafield(), fieldCap.isSearchable(), fieldCap.isAggregatable(), fieldCap.isDimension(), - fieldCap.getMetricType(), + fieldCap.metricType(), fieldCap.meta() ); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index f9f5f43494711..ba785fd4d9637 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -471,9 +471,6 @@ public static class Clusters implements ToXContentFragment, Writeable { private final int total; private final int successful; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map private final int skipped; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map - private final int running; // not used for minimize_roundtrips=true; dynamically determined from 
clusterInfo map - private final int partial; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map - private final int failed; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map // key to map is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query // the Map itself is immutable after construction - all Clusters will be accounted for at the start of the search @@ -503,6 +500,8 @@ public Clusters( assert remoteClusterIndices.size() > 0 : "At least one remote cluster must be passed into this Cluster constructor"; this.total = remoteClusterIndices.size() + (localIndices == null ? 0 : 1); assert total >= 1 : "No local indices or remote clusters passed in"; + this.successful = 0; // calculated from clusterInfo map for minimize_roundtrips + this.skipped = 0; // calculated from clusterInfo map for minimize_roundtrips this.ccsMinimizeRoundtrips = ccsMinimizeRoundtrips; Map> m = new HashMap<>(); if (localIndices != null) { @@ -517,11 +516,6 @@ public Clusters( m.put(clusterAlias, new AtomicReference<>(c)); } this.clusterInfo = Collections.unmodifiableMap(m); - this.successful = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.SUCCESSFUL); - this.skipped = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.SKIPPED); - this.running = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.RUNNING); - this.partial = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.PARTIAL); - this.failed = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.FAILED); } /** @@ -539,36 +533,39 @@ public Clusters(int total, int successful, int skipped) { this.total = total; this.successful = successful; this.skipped = skipped; - this.running = 0; - this.partial = 0; - this.failed = 0; this.ccsMinimizeRoundtrips = false; this.clusterInfo = Collections.emptyMap(); // will never be used if created from this constructor } public Clusters(StreamInput in) throws IOException { this.total = in.readVInt(); - this.successful = in.readVInt(); - this.skipped = in.readVInt(); + int successfulTemp = in.readVInt(); + int skippedTemp = in.readVInt(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_053)) { List clusterList = in.readCollectionAsList(Cluster::new); if (clusterList.isEmpty()) { this.clusterInfo = Collections.emptyMap(); + this.successful = successfulTemp; + this.skipped = skippedTemp; } else { Map> m = new HashMap<>(); clusterList.forEach(c -> m.put(c.getClusterAlias(), new AtomicReference<>(c))); this.clusterInfo = Collections.unmodifiableMap(m); + this.successful = getClusterStateCount(Cluster.Status.SUCCESSFUL); + this.skipped = getClusterStateCount(Cluster.Status.SKIPPED); } } else { + this.successful = successfulTemp; + this.skipped = skippedTemp; this.clusterInfo = Collections.emptyMap(); } - this.running = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.RUNNING); - this.partial = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.PARTIAL); - this.failed = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.FAILED); + int running = getClusterStateCount(Cluster.Status.RUNNING); + int partial = getClusterStateCount(Cluster.Status.PARTIAL); + int failed = getClusterStateCount(Cluster.Status.FAILED); this.ccsMinimizeRoundtrips = false; assert total >= 0 : "total is negative: " + total; - 
assert total >= successful + skipped + running + partial + failed - : "successful + skipped + running + partial + failed is larger than total. total: " + assert total == successful + skipped + running + partial + failed + : "successful + skipped + running + partial + failed is not equal to total. total: " + total + " successful: " + successful @@ -586,11 +583,8 @@ private Clusters(Map> clusterInfoMap) { assert clusterInfoMap.size() > 0 : "this constructor should not be called with an empty Cluster info map"; this.total = clusterInfoMap.size(); this.clusterInfo = clusterInfoMap; - this.successful = 0; // calculated from clusterInfo map for minimize_roundtrips - this.skipped = 0; // calculated from clusterInfo map for minimize_roundtrips - this.running = 0; // calculated from clusterInfo map for minimize_roundtrips - this.partial = 0; // calculated from clusterInfo map for minimize_roundtrips - this.failed = 0; // calculated from clusterInfo map for minimize_roundtrips + this.successful = getClusterStateCount(Cluster.Status.SUCCESSFUL); + this.skipped = getClusterStateCount(Cluster.Status.SKIPPED); // should only be called if "details" section of fromXContent is present (for ccsMinimizeRoundtrips) this.ccsMinimizeRoundtrips = true; } @@ -705,11 +699,9 @@ public int getTotal() { public int getClusterStateCount(Cluster.Status status) { if (clusterInfo.isEmpty()) { return switch (status) { - case RUNNING -> running; case SUCCESSFUL -> successful; - case PARTIAL -> partial; case SKIPPED -> skipped; - case FAILED -> failed; + default -> 0; }; } else { return determineCountFromClusterInfo(cluster -> cluster.getStatus() == status); @@ -752,16 +744,23 @@ public boolean equals(Object o) { } Clusters clusters = (Clusters) o; return total == clusters.total - && successful == clusters.successful - && skipped == clusters.skipped - && running == clusters.running - && partial == clusters.partial - && failed == clusters.failed; + && getClusterStateCount(Cluster.Status.SUCCESSFUL) == clusters.getClusterStateCount(Cluster.Status.SUCCESSFUL) + && getClusterStateCount(Cluster.Status.SKIPPED) == clusters.getClusterStateCount(Cluster.Status.SKIPPED) + && getClusterStateCount(Cluster.Status.RUNNING) == clusters.getClusterStateCount(Cluster.Status.RUNNING) + && getClusterStateCount(Cluster.Status.PARTIAL) == clusters.getClusterStateCount(Cluster.Status.PARTIAL) + && getClusterStateCount(Cluster.Status.FAILED) == clusters.getClusterStateCount(Cluster.Status.FAILED); } @Override public int hashCode() { - return Objects.hash(total, successful, skipped, running, partial, failed); + return Objects.hash( + total, + getClusterStateCount(Cluster.Status.SUCCESSFUL), + getClusterStateCount(Cluster.Status.SKIPPED), + getClusterStateCount(Cluster.Status.RUNNING), + getClusterStateCount(Cluster.Status.PARTIAL), + getClusterStateCount(Cluster.Status.FAILED) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java index e04e8a47349b6..9d41dd86d2ceb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java @@ -402,7 +402,7 @@ private class ShardAllocationCounts { private final Map> diagnosisDefinitions = new HashMap<>(); public void 
increment(ShardRouting routing, ClusterState state, NodesShutdownMetadata shutdowns, boolean verbose) { - boolean isNew = isUnassignedDueToNewInitialization(routing); + boolean isNew = isUnassignedDueToNewInitialization(routing, state); boolean isRestarting = isUnassignedDueToTimelyRestart(routing, shutdowns); available &= routing.active() || isRestarting || isNew; if ((routing.active() || isRestarting || isNew) == false) { @@ -454,8 +454,14 @@ private static boolean isUnassignedDueToTimelyRestart(ShardRouting routing, Node return now - restartingAllocationDelayExpiration <= 0; } - private static boolean isUnassignedDueToNewInitialization(ShardRouting routing) { - return routing.primary() && routing.active() == false && getInactivePrimaryHealth(routing) == ClusterHealthStatus.YELLOW; + private static boolean isUnassignedDueToNewInitialization(ShardRouting routing, ClusterState state) { + if (routing.active()) { + return false; + } + // If the primary is inactive for unexceptional events in the cluster lifecycle, both the primary and the + // replica are considered new initializations. + ShardRouting primary = routing.primary() ? routing : state.routingTable().shardRoutingTable(routing.shardId()).primaryShard(); + return primary.active() == false && getInactivePrimaryHealth(primary) == ClusterHealthStatus.YELLOW; } /** @@ -815,6 +821,7 @@ public String getSymptom() { || primaries.unassigned_new > 0 || primaries.unassigned_restarting > 0 || replicas.unassigned > 0 + || replicas.unassigned_new > 0 || replicas.unassigned_restarting > 0 || primaries.initializing > 0 || replicas.initializing > 0) { @@ -822,6 +829,7 @@ public String getSymptom() { Stream.of( createMessage(primaries.unassigned, "unavailable primary shard", "unavailable primary shards"), createMessage(primaries.unassigned_new, "creating primary shard", "creating primary shards"), + createMessage(replicas.unassigned_new, "creating replica shard", "creating replica shards"), createMessage(primaries.unassigned_restarting, "restarting primary shard", "restarting primary shards"), createMessage(replicas.unassigned, "unavailable replica shard", "unavailable replica shards"), createMessage(primaries.initializing, "initializing primary shard", "initializing primary shards"), @@ -861,6 +869,8 @@ public HealthIndicatorDetails getDetails(boolean verbose) { replicas.unassigned, "initializing_replicas", replicas.initializing, + "creating_replicas", + replicas.unassigned_new, "restarting_replicas", replicas.unassigned_restarting, "started_replicas", diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java index de061c7f314d6..91dbfc30123fe 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java @@ -160,7 +160,14 @@ public void writeTo(StreamOutput out) throws IOException { @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + startOffsets.ramBytesUsed() + bytes.ramBytesUsed(); + return BASE_RAM_BYTES_USED + bigArraysRamBytesUsed(); + } + + /** + * Memory used by the {@link BigArrays} portion of this {@link BytesRefArray}. 
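The health-indicator change above reduces to one rule: an inactive shard counts as a benign "new initialization" exactly when its primary's inactivity is expected, so a creating replica no longer drags the indicator down alongside its creating primary. A hedged sketch of that rule with hypothetical stand-in types (the real code consults `ShardRouting`, `ClusterState`, and `getInactivePrimaryHealth`):

```java
// Hypothetical stand-ins for ShardRouting and the inactive-primary health check.
record ShardView(boolean active, boolean inactivityExpected) {}

final class NewInitializationRule {
    // For a primary, pass the shard itself as `primary`; for a replica, pass its primary.
    static boolean isUnassignedDueToNewInitialization(ShardView shard, ShardView primary) {
        if (shard.active()) {
            return false;
        }
        // the replica inherits the primary's "expected" inactivity
        return primary.active() == false && primary.inactivityExpected();
    }
}
```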
+ */ + public long bigArraysRamBytesUsed() { + return startOffsets.ramBytesUsed() + bytes.ramBytesUsed(); } } diff --git a/server/src/main/java/org/elasticsearch/index/Index.java b/server/src/main/java/org/elasticsearch/index/Index.java index 85468326954d6..e11fd394d60a9 100644 --- a/server/src/main/java/org/elasticsearch/index/Index.java +++ b/server/src/main/java/org/elasticsearch/index/Index.java @@ -103,9 +103,14 @@ public void writeTo(final StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); + toXContentFragment(builder); + return builder.endObject(); + } + + public XContentBuilder toXContentFragment(final XContentBuilder builder) throws IOException { builder.field(INDEX_NAME_KEY, name); builder.field(INDEX_UUID_KEY, uuid); - return builder.endObject(); + return builder; } public static Index fromXContent(final XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index a9d90f80c8a18..350ac22c5e216 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -36,7 +36,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; @@ -1108,7 +1107,7 @@ public static Parameter> metaParam() { return new Parameter<>( "meta", true, - Collections::emptyMap, + Map::of, (n, c, o) -> TypeParsers.parseMeta(n, o), m -> m.fieldType().meta(), XContentBuilder::stringStringMap, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index d7fa0dae21b38..21ed56a82292c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -79,7 +79,9 @@ public MappedFieldType( this.isStored = isStored; this.docValues = hasDocValues; this.textSearchInfo = Objects.requireNonNull(textSearchInfo); - this.meta = Objects.requireNonNull(meta); + // meta should be sorted but for the one item or empty case we can fall back to immutable maps to save some memory since order is + // irrelevant + this.meta = meta.size() <= 1 ? 
Map.copyOf(meta) : meta; } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index c42c4df01c5fa..40c96b9976317 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -38,6 +38,9 @@ public static Map parseMeta(String name, Object metaObject) { } @SuppressWarnings("unchecked") Map meta = (Map) metaObject; + if (meta.isEmpty()) { + return Map.of(); + } if (meta.size() > 5) { throw new MapperParsingException("[meta] can't have more than 5 entries, but got " + meta.size() + " on field [" + name + "]"); } @@ -69,6 +72,12 @@ public static Map parseMeta(String name, Object metaObject) { ); } } + var entrySet = meta.entrySet(); + if (entrySet.size() == 1) { + // no need to sort for a single entry + var entry = entrySet.iterator().next(); + return Map.of(entry.getKey(), (String) entry.getValue()); + } Map sortedMeta = new TreeMap<>(); for (Map.Entry entry : meta.entrySet()) { sortedMeta.put(entry.getKey(), (String) entry.getValue()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java index 88a78a512d1bd..273df99f6479c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java @@ -53,6 +53,12 @@ public long getTimestamp() { } public int getTsidOrd() { + if (tsidOrdProvider == null) { + throw new IllegalArgumentException( + "Aggregation on a time-series field is misconfigured, likely due to lack of wrapping " + + "a metric aggregation within a `time-series` aggregation" + ); + } return tsidOrdProvider.getAsInt(); } } diff --git a/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java b/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java index 0df8aeedac7f8..add994787227f 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java +++ b/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java @@ -8,11 +8,15 @@ package org.elasticsearch.telemetry; +import org.elasticsearch.telemetry.metric.Meter; import org.elasticsearch.telemetry.tracing.Tracer; public interface TelemetryProvider { + Tracer getTracer(); + Meter getMeter(); + TelemetryProvider NOOP = new TelemetryProvider() { @Override @@ -20,5 +24,9 @@ public Tracer getTracer() { return Tracer.NOOP; } + @Override + public Meter getMeter() { + return Meter.NOOP; + } }; } diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleCounter.java new file mode 100644 index 0000000000000..c98701bb0a1bb --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleCounter.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
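Both mapper-side changes above apply the same size-tiered map strategy: empty and single-entry `_meta` maps become compact immutable maps, since sort order is irrelevant at that size, and only larger maps pay for a sorted `TreeMap` copy that keeps serialization deterministic. A condensed sketch of that strategy, assuming string keys and values as `parseMeta` enforces:

```java
import java.util.Map;
import java.util.TreeMap;

final class MetaMapSketch {
    /** Size-tiered copy: immutable Map.of(...) for 0-1 entries, sorted TreeMap otherwise. */
    static Map<String, String> copy(Map<String, String> meta) {
        if (meta.isEmpty()) {
            return Map.of();
        }
        if (meta.size() == 1) {
            var entry = meta.entrySet().iterator().next();
            return Map.of(entry.getKey(), entry.getValue());
        }
        return new TreeMap<>(meta); // larger maps keep keys in deterministic sorted order
    }
}
```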
+ */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A monotonically increasing metric that uses a double. + * Useful for capturing the number of bytes received, number of requests, etc. + */ +public interface DoubleCounter extends Instrument { + /** + * Add one to the current counter. + */ + void increment(); + + /** + * Increment the counter. + * @param inc amount to increment, non-negative + */ + void incrementBy(double inc); + + /** + * Increment the counter. + * @param inc amount to increment, non-negative + * @param attributes key-value pairs to associate with this increment + */ + void incrementBy(double inc, Map attributes); + + /** + * Noop counter for use in tests. + */ + DoubleCounter NOOP = new DoubleCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void increment() { + + } + + @Override + public void incrementBy(double inc) { + + } + + @Override + public void incrementBy(double inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java new file mode 100644 index 0000000000000..797c125900bb8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record non-additive double values. e.g. number of running threads, current load + */ +public interface DoubleGauge extends Instrument { + /** + * Record the current value for the measured item + */ + void record(double value); + + /** + * Record the current value + * @param attributes key-value pairs to associate with the current measurement + */ + void record(double value, Map attributes); + + /** + * Noop gauge for tests + */ + DoubleGauge NOOP = new DoubleGauge() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(double value) { + + } + + @Override + public void record(double value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleHistogram.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleHistogram.java new file mode 100644 index 0000000000000..11958ea36cd3d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleHistogram.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record arbitrary values that are summarized statistically, useful for percentiles and histograms.
+ */ +public interface DoubleHistogram extends Instrument { + /** + * Record a sample for the measured item + * @param value + */ + void record(double value); + + /** + * Record a sample for the measured item + * @param attributes key-value pairs to associate with the current sample + */ + void record(double value, Map attributes); + + /** + * Noop histogram for tests + */ + DoubleHistogram NOOP = new DoubleHistogram() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(double value) { + + } + + @Override + public void record(double value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleUpDownCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleUpDownCounter.java new file mode 100644 index 0000000000000..7d484ebf07d32 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleUpDownCounter.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A counter that supports decreasing and increasing values. + * Useful for capturing the number of requests in a queue. + */ +public interface DoubleUpDownCounter extends Instrument { + /** + * Add to the counter + * @param inc may be negative. + */ + void add(double inc); + + /** + * Add to the counter + * @param inc may be negative. + * @param attributes key-value pairs to associate with this increment + */ + void add(double inc, Map attributes); + + /** + * Noop counter for use in tests + */ + DoubleUpDownCounter NOOP = new DoubleUpDownCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void add(double inc) { + + } + + @Override + public void add(double inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/Instrument.java b/server/src/main/java/org/elasticsearch/telemetry/metric/Instrument.java new file mode 100644 index 0000000000000..19a7e259120f2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/Instrument.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +public interface Instrument { + String getName(); +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongCounter.java new file mode 100644 index 0000000000000..f8f2150163835 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongCounter.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A monotonically increasing metric that uses a long. Useful for integral values such as the number of bytes received, + * number of requests, etc. + */ +public interface LongCounter extends Instrument { + /** + * Add one to the current counter + */ + void increment(); + + /** + * Increment the counter + * @param inc amount to increment + */ + void incrementBy(long inc); + + /** + * Increment the counter. + * @param inc amount to increment + * @param attributes key-value pairs to associate with this increment + */ + void incrementBy(long inc, Map attributes); + + /** + * Noop counter for use in tests. + */ + LongCounter NOOP = new LongCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void increment() { + + } + + @Override + public void incrementBy(long inc) { + + } + + @Override + public void incrementBy(long inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java new file mode 100644 index 0000000000000..71539064ce53e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record non-additive long values. + */ +public interface LongGauge extends Instrument { + + /** + * Record the current value of the measured item. + * @param value + */ + void record(long value); + + /** + * Record the current value + * @param attributes key-value pairs to associate with the current measurement + */ + void record(long value, Map attributes); + + /** + * Noop gauge for tests + */ + LongGauge NOOP = new LongGauge() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(long value) { + + } + + @Override + public void record(long value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongHistogram.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongHistogram.java new file mode 100644 index 0000000000000..27d5261f755ef --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongHistogram.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record arbitrary values that are summarized statistically, useful for percentiles and histograms. 
+ */ +public interface LongHistogram extends Instrument { + /** + * Record a sample for the measured item + * @param value + */ + void record(long value); + + /** + * Record a sample for the measured item + * @param attributes key-value pairs to associate with the current sample + */ + void record(long value, Map attributes); + + /** + * Noop histogram for tests + */ + LongHistogram NOOP = new LongHistogram() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(long value) { + + } + + @Override + public void record(long value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongUpDownCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongUpDownCounter.java new file mode 100644 index 0000000000000..f62030da8f6bd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongUpDownCounter.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A counter that supports decreasing and increasing values. + * Useful for capturing the number of requests in a queue. + */ +public interface LongUpDownCounter extends Instrument { + /** + * Add to the counter + * @param inc may be negative. + */ + void add(long inc); + + /** + * Add to the counter + * @param inc may be negative. + * @param attributes key-value pairs to associate with this increment + */ + void add(long inc, Map attributes); + + /** + * Noop counter for use in tests + */ + LongUpDownCounter NOOP = new LongUpDownCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void add(long inc) { + + } + + @Override + public void add(long inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java new file mode 100644 index 0000000000000..77bbf6f673fd3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java @@ -0,0 +1,228 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +/** + * Container for metering instruments. Instruments with the same name and type (DoubleCounter, etc.) can + * only be registered once. + * TODO(stu): describe name, unit and description + */ +public interface Meter { + /** + * Register a {@link DoubleCounter}. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleCounter registerDoubleCounter(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link DoubleCounter}. + * @param name name of the counter + * @return the registered meter.
+ */ + DoubleCounter getDoubleCounter(String name); + + /** + * Register a {@link DoubleUpDownCounter}. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link DoubleUpDownCounter}. + * @param name name of the counter + * @return the registered meter. + */ + DoubleUpDownCounter getDoubleUpDownCounter(String name); + + /** + * Register a {@link DoubleGauge}. The returned object may be reused. + * @param name name of the gauge + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleGauge registerDoubleGauge(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link DoubleGauge}. + * @param name name of the gauge + * @return the registered meter. + */ + DoubleGauge getDoubleGauge(String name); + + /** + * Register a {@link DoubleHistogram}. The returned object may be reused. + * @param name name of the histogram + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleHistogram registerDoubleHistogram(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link DoubleHistogram}. + * @param name name of the histogram + * @return the registered meter. + */ + DoubleHistogram getDoubleHistogram(String name); + + /** + * Register a {@link LongCounter}. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + LongCounter registerLongCounter(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link LongCounter}. + * @param name name of the counter + * @return the registered meter. + */ + LongCounter getLongCounter(String name); + + /** + * Register a {@link LongUpDownCounter}. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link LongUpDownCounter}. + * @param name name of the counter + * @return the registered meter. + */ + LongUpDownCounter getLongUpDownCounter(String name); + + /** + * Register a {@link LongGauge}. The returned object may be reused. + * @param name name of the gauge + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + LongGauge registerLongGauge(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link LongGauge}. + * @param name name of the gauge + * @return the registered meter. + */ + LongGauge getLongGauge(String name); + + /** + * Register a {@link LongHistogram}. The returned object may be reused. + * @param name name of the histogram + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter.
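Each `registerX` method has a matching `getX` lookup, so callers typically register an instrument once at startup and retrieve it by name elsewhere. A minimal usage sketch against the `Meter.NOOP` implementation defined below; the instrument name, description, and unit strings are invented for illustration:

```java
import org.elasticsearch.telemetry.metric.LongCounter;
import org.elasticsearch.telemetry.metric.Meter;

import java.util.Map;

class MeterUsageSketch {
    public static void main(String[] args) {
        // a real Meter would come from TelemetryProvider#getMeter
        Meter meter = Meter.NOOP;
        LongCounter requests = meter.registerLongCounter("es.example.requests", "requests handled", "count");
        requests.increment();
        requests.incrementBy(5, Map.of("endpoint", "_search")); // attributes tag this increment
        // elsewhere, the same instrument can be looked up by name
        meter.getLongCounter("es.example.requests").increment();
    }
}
```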
+ */ + LongHistogram registerLongHistogram(String name, String description, String unit); + + /** + * Retrieves a previously registered {@link LongHistogram}. + * @param name name of the histogram + * @return the registered meter. + */ + LongHistogram getLongHistogram(String name); + + /** + * Noop implementation for tests + */ + Meter NOOP = new Meter() { + @Override + public DoubleCounter registerDoubleCounter(String name, String description, String unit) { + return DoubleCounter.NOOP; + } + + @Override + public DoubleCounter getDoubleCounter(String name) { + return DoubleCounter.NOOP; + } + + public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { + return DoubleUpDownCounter.NOOP; + } + + @Override + public DoubleUpDownCounter getDoubleUpDownCounter(String name) { + return DoubleUpDownCounter.NOOP; + } + + @Override + public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + return DoubleGauge.NOOP; + } + + @Override + public DoubleGauge getDoubleGauge(String name) { + return DoubleGauge.NOOP; + } + + @Override + public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { + return DoubleHistogram.NOOP; + } + + @Override + public DoubleHistogram getDoubleHistogram(String name) { + return DoubleHistogram.NOOP; + } + + @Override + public LongCounter registerLongCounter(String name, String description, String unit) { + return LongCounter.NOOP; + } + + @Override + public LongCounter getLongCounter(String name) { + return LongCounter.NOOP; + } + + @Override + public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { + return LongUpDownCounter.NOOP; + } + + @Override + public LongUpDownCounter getLongUpDownCounter(String name) { + return LongUpDownCounter.NOOP; + } + + @Override + public LongGauge registerLongGauge(String name, String description, String unit) { + return LongGauge.NOOP; + } + + @Override + public LongGauge getLongGauge(String name) { + return LongGauge.NOOP; + } + + @Override + public LongHistogram registerLongHistogram(String name, String description, String unit) { + return LongHistogram.NOOP; + } + + @Override + public LongHistogram getLongHistogram(String name) { + return LongHistogram.NOOP; + } + }; +} diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java index a95340e2fffd1..0802e498c43a7 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java @@ -161,8 +161,8 @@ public void testSerializeNodeResponseBetweenOldNodes() throws IOException { // Exclude metric types which was introduced in 8.0 assertThat(outCap.keySet(), equalTo(inCap.keySet())); for (String field : outCap.keySet()) { - assertThat(outCap.get(field).getName(), equalTo(inCap.get(field).getName())); - assertThat(outCap.get(field).getType(), equalTo(inCap.get(field).getType())); + assertThat(outCap.get(field).name(), equalTo(inCap.get(field).name())); + assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); assertThat(outCap.get(field).meta(),
equalTo(inCap.get(field).meta())); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index cc7c76553ef99..461000fc22b02 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -235,8 +235,8 @@ public void testSerializeCCSResponseBetweenOldClusters() throws IOException { // Exclude metric types which was introduced in 8.0 assertThat(outCap.keySet(), equalTo(inCap.keySet())); for (String field : outCap.keySet()) { - assertThat(outCap.get(field).getName(), equalTo(inCap.get(field).getName())); - assertThat(outCap.get(field).getType(), equalTo(inCap.get(field).getType())); + assertThat(outCap.get(field).name(), equalTo(inCap.get(field).name())); + assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); assertThat(outCap.get(field).meta(), equalTo(inCap.get(field).meta())); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java index e6da007b085a3..708a3125590fd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java @@ -533,8 +533,11 @@ public void testShouldBeYellowWhenRestartingReplicasReachedAllocationDelay() { ); } - public void testShouldBeGreenWhenThereAreInitializingPrimaries() { - var clusterState = createClusterStateWith(List.of(index("restarting-index", new ShardAllocation("node-0", CREATING))), List.of()); + public void testShouldBeGreenWhenThereAreInitializingPrimariesAndReplicas() { + var clusterState = createClusterStateWith( + List.of(index("restarting-index", new ShardAllocation("node-0", CREATING), new ShardAllocation("node-1", CREATING))), + List.of() + ); var service = createShardsAvailabilityIndicatorService(clusterState); assertThat( @@ -542,8 +545,8 @@ public void testShouldBeGreenWhenThereAreInitializingPrimaries() { equalTo( createExpectedResult( GREEN, - "This cluster has 1 creating primary shard.", - Map.of("creating_primaries", 1), + "This cluster has 1 creating primary shard, 1 creating replica shard.", + Map.of("creating_primaries", 1, "creating_replicas", 1), emptyList(), emptyList() ) @@ -1765,6 +1768,8 @@ private static Map addDefaults(Map override) { override.getOrDefault("started_primaries", 0), "unassigned_replicas", override.getOrDefault("unassigned_replicas", 0), + "creating_replicas", + override.getOrDefault("creating_replicas", 0), "initializing_replicas", override.getOrDefault("initializing_replicas", 0), "restarting_replicas", diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 02e8cfd7f16fc..0a0592b5a01f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ 
b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -678,6 +678,9 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu while (true) { long old = used.get(); long total = old + bytes; + if (total < 0) { + throw new AssertionError("total must be >= 0 but was [" + total + "]"); + } if (total > max.getBytes()) { throw new CircuitBreakingException(ERROR_MESSAGE, bytes, max.getBytes(), Durability.TRANSIENT); } @@ -689,7 +692,10 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu @Override public void addWithoutBreaking(long bytes) { - used.addAndGet(bytes); + long total = used.addAndGet(bytes); + if (total < 0) { + throw new AssertionError("total must be >= 0 but was [" + total + "]"); + } } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java b/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java index 15ffa52569d00..bd5f974a5f800 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java @@ -15,6 +15,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerStats; import org.elasticsearch.test.ESTestCase; +import java.util.concurrent.atomic.AtomicLong; + /** * {@link CircuitBreakerService} that fails one twentieth of the time when you * add bytes. This is useful to make sure code responds sensibly to circuit @@ -27,31 +29,32 @@ public class CrankyCircuitBreakerService extends CircuitBreakerService { public static final String ERROR_MESSAGE = "cranky breaker"; private final CircuitBreaker breaker = new CircuitBreaker() { - @Override - public void circuitBreak(String fieldName, long bytesNeeded) { + private final AtomicLong used = new AtomicLong(); - } + @Override + public void circuitBreak(String fieldName, long bytesNeeded) {} @Override public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { if (ESTestCase.random().nextInt(20) == 0) { throw new CircuitBreakingException(ERROR_MESSAGE, Durability.PERMANENT); } + used.addAndGet(bytes); } @Override public void addWithoutBreaking(long bytes) { - + used.addAndGet(bytes); } @Override public long getUsed() { - return 0; + return used.get(); } @Override public long getLimit() { - return 0; + return Long.MAX_VALUE; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index d19fd4ecf08a3..5a28bd8b0ea6d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -59,8 +59,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -201,16 +199,8 @@ public abstract class AggregatorTestCase extends ESTestCase { @Before public final void initPlugins() { - int numThreads = randomIntBetween(2, 4); threadPool = new 
TestThreadPool(AggregatorTestCase.class.getName()); - threadPoolExecutor = EsExecutors.newFixed( - "test", - numThreads, - 10, - EsExecutors.daemonThreadFactory("test"), - threadPool.getThreadContext(), - randomFrom(TaskTrackingConfig.DEFAULT, TaskTrackingConfig.DO_NOT_TRACK) - ); + threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH_WORKER); List<SearchPlugin> plugins = new ArrayList<>(getSearchPlugins()); plugins.add(new AggCardinalityUpperBoundPlugin()); SearchModule searchModule = new SearchModule(Settings.EMPTY, plugins); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidator.java index 49bdf60ad0b18..9962f14ec3736 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidator.java @@ -73,9 +73,6 @@ public final class SourceDestValidator { public static final String REMOTE_CLUSTERS_TRANSPORT_TOO_OLD = "remote clusters are expected to run at least transport version [{0}] (reason: [{1}])," + " but the following clusters were too old: [{2}]"; - public static final String REMOTE_CLUSTERS_CONFIG_TOO_OLD = - "remote clusters are expected to run at least config version [{0}] (reason: [{1}])," - + " but the following clusters were too old: [{2}]"; public static final String PIPELINE_MISSING = "Pipeline with id [{0}] could not be found"; private final IndexNameExpressionResolver indexNameExpressionResolver; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java index 75370db1d766f..e631e3efe5cb6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java @@ -268,7 +268,8 @@ public static MlConfigVersion max(MlConfigVersion version1, MlConfigVersion vers return version1.id > version2.id ? 
version1 : version2; } - public static MlConfigVersion fromVersion(Version version) { + // Visible only for testing + static MlConfigVersion fromVersion(Version version) { if (version.equals(Version.V_8_10_0)) { return V_10; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 7f5de886222ba..cacc4a6a33196 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -9,6 +9,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.SimpleDiffable; @@ -36,7 +38,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.common.time.TimeUtils; -import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlStrings; @@ -86,7 +87,7 @@ */ public class DatafeedConfig implements SimpleDiffable<DatafeedConfig>, ToXContentObject { - private static final MlConfigVersion RUNTIME_MAPPINGS_INTRODUCED = MlConfigVersion.V_7_11_0; + private static final TransportVersion RUNTIME_MAPPINGS_INTRODUCED = TransportVersions.V_7_11_0; public static final int DEFAULT_SCROLL_SIZE = 1000; @@ -340,7 +341,7 @@ public Integer getScrollSize() { return scrollSize; } - public Optional<Tuple<MlConfigVersion, String>> minRequiredConfigVersion() { + public Optional<Tuple<TransportVersion, String>> minRequiredTransportVersion() { return runtimeMappings.isEmpty() ? Optional.empty() : Optional.of(Tuple.tuple(RUNTIME_MAPPINGS_INTRODUCED, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index 9d4442f877b85..9dfa2d51f0fc0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -103,6 +103,7 @@ public class TrainedModelConfig implements ToXContentObject, Writeable { public static final ParseField PER_DEPLOYMENT_MEMORY_BYTES = new ParseField("per_deployment_memory_bytes"); public static final ParseField PER_ALLOCATION_MEMORY_BYTES = new ParseField("per_allocation_memory_bytes"); + public static final ParseField PLATFORM_ARCHITECTURE = new ParseField("platform_architecture"); public static final TransportVersion VERSION_3RD_PARTY_CONFIG_ADDED = TransportVersions.V_8_0_0; public static final TransportVersion VERSION_ALLOCATION_MEMORY_ADDED = TransportVersions.V_8_500_064; @@ -168,6 +169,7 @@ private static ObjectParser createParser(boole (p, c) -> ignoreUnknownFields ? 
ModelPackageConfig.fromXContentLenient(p) : ModelPackageConfig.fromXContentStrict(p), MODEL_PACKAGE ); + parser.declareString(TrainedModelConfig.Builder::setPlatformArchitecture, PLATFORM_ARCHITECTURE); return parser; } @@ -195,6 +197,7 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo private final TrainedModelLocation location; private final ModelPackageConfig modelPackageConfig; private Boolean fullDefinition; + private String platformArchitecture; TrainedModelConfig( String modelId, @@ -213,7 +216,8 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo Map defaultFieldMap, InferenceConfig inferenceConfig, TrainedModelLocation location, - ModelPackageConfig modelPackageConfig + ModelPackageConfig modelPackageConfig, + String platformArchitecture ) { this.modelId = ExceptionsHelper.requireNonNull(modelId, MODEL_ID); this.modelType = modelType; @@ -240,6 +244,7 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo this.inferenceConfig = inferenceConfig; this.location = location; this.modelPackageConfig = modelPackageConfig; + this.platformArchitecture = platformArchitecture; } private static TrainedModelInput handleDefaultInput(TrainedModelInput input, TrainedModelType modelType) { @@ -279,6 +284,11 @@ public TrainedModelConfig(StreamInput in) throws IOException { modelPackageConfig = null; fullDefinition = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + platformArchitecture = in.readOptionalString(); + } else { + platformArchitecture = null; + } } public boolean isPackagedModel() { @@ -421,6 +431,10 @@ public long getPerAllocationMemoryBytes() { : 0L; } + public String getPlatformArchitecture() { + return platformArchitecture; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); @@ -451,6 +465,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(modelPackageConfig); out.writeOptionalBoolean(fullDefinition); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + out.writeOptionalString(platformArchitecture); + } } @Override @@ -463,6 +481,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (modelPackageConfig != null) { builder.field(MODEL_PACKAGE.getPreferredName(), modelPackageConfig); } + if (platformArchitecture != null) { + builder.field(PLATFORM_ARCHITECTURE.getPreferredName(), platformArchitecture); + } // If the model is to be exported for future import to another cluster, these fields are irrelevant. 
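The EXCLUDE_GENERATED branch just below is what keeps machine-generated fields out of exported model configs. A minimal sketch of a caller that relies on it, assuming the param's literal key is "exclude_generated" (the constant's value is not shown in this hunk):

```java
import java.io.IOException;
import java.util.Map;

import org.elasticsearch.common.Strings;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

// Hypothetical export helper: serializes a model config for import into
// another cluster, omitting fields the receiving cluster will regenerate.
static String toExportJson(TrainedModelConfig config) throws IOException {
    ToXContent.Params exportParams = new ToXContent.MapParams(Map.of("exclude_generated", "true"));
    try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
        config.toXContent(builder, exportParams);
        return Strings.toString(builder);
    }
}
```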
if (params.paramAsBoolean(EXCLUDE_GENERATED, false) == false) { @@ -543,7 +564,8 @@ public boolean equals(Object o) { && Objects.equals(defaultFieldMap, that.defaultFieldMap) && Objects.equals(inferenceConfig, that.inferenceConfig) && Objects.equals(metadata, that.metadata) - && Objects.equals(location, that.location); + && Objects.equals(location, that.location) + && Objects.equals(platformArchitecture, that.platformArchitecture); } @Override @@ -565,7 +587,8 @@ public int hashCode() { licenseLevel, inferenceConfig, defaultFieldMap, - location + location, + platformArchitecture ); } @@ -590,6 +613,7 @@ public static class Builder { private ModelPackageConfig modelPackageConfig; private Long perDeploymentMemoryBytes; private Long perAllocationMemoryBytes; + private String platformArchitecture; public Builder() {} @@ -611,6 +635,7 @@ public Builder(TrainedModelConfig config) { this.inferenceConfig = config.inferenceConfig; this.location = config.location; this.modelPackageConfig = config.modelPackageConfig; + this.platformArchitecture = config.platformArchitecture; } public Builder setModelId(String modelId) { @@ -703,6 +728,11 @@ public Builder setHyperparameters(List hyperparameters) { return addToMetadata(HYPERPARAMETERS, hyperparameters.stream().map(Hyperparameters::asMap).collect(Collectors.toList())); } + public Builder setPlatformArchitecture(String platformArchitecture) { + this.platformArchitecture = platformArchitecture; + return this; + } + public Builder setModelAliases(Set modelAliases) { if (modelAliases == null || modelAliases.isEmpty()) { return this; @@ -1022,7 +1052,8 @@ public TrainedModelConfig build() { defaultFieldMap, inferenceConfig, location, - modelPackageConfig + modelPackageConfig, + platformArchitecture ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java index ca70f9e9e761d..6c8fc6fec4e0e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java @@ -42,11 +42,15 @@ public final class InferenceIndexConstants { public static final ParseField DOC_TYPE = new ParseField("doc_type"); private static final String NATIVE_INDEX_PREFIX = INDEX_NAME_PREFIX + "native-"; - private static final String NATIVE_INDEX_VERSION = "000001"; + + // 000002 added support for platform specific models + private static final String NATIVE_INDEX_VERSION = "000002"; private static final String NATIVE_LATEST_INDEX = NATIVE_INDEX_PREFIX + NATIVE_INDEX_VERSION; private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; - public static final int INFERENCE_INDEX_MAPPINGS_VERSION = 1; + + // 2 added support for platform specific models + public static final int INFERENCE_INDEX_MAPPINGS_VERSION = 2; public static String mapping() { return TemplateUtils.loadTemplate( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java index acc10008cd40f..3d6a1aef8477a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java @@ -302,7 +302,8 @@ public static TransformConfigVersion max(TransformConfigVersion version1, Transf return version1.id > version2.id ? version1 : version2; } - public static TransformConfigVersion fromVersion(Version version) { + // Visible only for testing + static TransformConfigVersion fromVersion(Version version) { if (version.equals(Version.V_8_10_0)) { return V_10; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java index 51f65fe0c9a0b..8b382beeb0644 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -104,7 +105,8 @@ public static TrainedModelConfig.Builder createTestInstance(String modelId, bool .setLicenseLevel(randomFrom(License.OperationMode.PLATINUM.description(), License.OperationMode.BASIC.description())) .setInferenceConfig(randomFrom(inferenceConfigs)) .setTags(tags) - .setLocation(randomBoolean() ? null : IndexLocationTests.randomInstance()); + .setLocation(randomBoolean() ? null : IndexLocationTests.randomInstance()) + .setPlatformArchitecture(randomBoolean() ? 
null : randomAlphaOfLength(10)); } @Before @@ -191,7 +193,8 @@ public void testToXContentWithParams() throws IOException { .collect(Collectors.toMap(Function.identity(), (k) -> randomAlphaOfLength(10))), randomFrom(ClassificationConfigTests.randomClassificationConfig(), RegressionConfigTests.randomRegressionConfig()), null, - ModelPackageConfigTests.randomModulePackageConfig() + ModelPackageConfigTests.randomModulePackageConfig(), + randomAlphaOfLength(10) ); BytesReference reference = XContentHelper.toXContent(config, XContentType.JSON, ToXContent.EMPTY_PARAMS, false); @@ -241,7 +244,8 @@ public void testParseWithBothDefinitionAndCompressedSupplied() throws IOExceptio .collect(Collectors.toMap(Function.identity(), (k) -> randomAlphaOfLength(10))), randomFrom(ClassificationConfigTests.randomClassificationConfig(), RegressionConfigTests.randomRegressionConfig()), null, - ModelPackageConfigTests.randomModulePackageConfig() + ModelPackageConfigTests.randomModulePackageConfig(), + randomAlphaOfLength(10) ); BytesReference reference = XContentHelper.toXContent(config, XContentType.JSON, ToXContent.EMPTY_PARAMS, false); @@ -453,6 +457,9 @@ protected TrainedModelConfig mutateInstanceForVersion(TrainedModelConfig instanc if (instance.getInferenceConfig() instanceof NlpConfig nlpConfig) { builder.setInferenceConfig(InferenceConfigItemTestCase.mutateForVersion(nlpConfig, version)); } + if (version.before(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + builder.setPlatformArchitecture(null); + } return builder.build(); } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json index 7ff961a0aac9c..77634546e0e6e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json @@ -12,6 +12,9 @@ "model_id": { "type": "keyword" }, + "platform_architecture" : { + "type" : "keyword" + }, "created_by": { "type": "keyword" }, diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java index af42d94c236f2..d5e38127cdec7 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java @@ -13,8 +13,6 @@ import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; -import java.util.BitSet; - import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; @@ -23,18 +21,13 @@ import static org.elasticsearch.compute.gen.Methods.appendMethod; import static org.elasticsearch.compute.gen.Methods.getMethod; import static org.elasticsearch.compute.gen.Types.ABSTRACT_CONVERT_FUNCTION_EVALUATOR; -import static org.elasticsearch.compute.gen.Types.BIG_ARRAYS; import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BYTES_REF; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_ARRAY; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_BLOCK; +import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static 
org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.SOURCE; import static org.elasticsearch.compute.gen.Types.VECTOR; -import static org.elasticsearch.compute.gen.Types.arrayBlockType; -import static org.elasticsearch.compute.gen.Types.arrayVectorType; import static org.elasticsearch.compute.gen.Types.blockType; -import static org.elasticsearch.compute.gen.Types.constantVectorType; import static org.elasticsearch.compute.gen.Types.vectorType; public class ConvertEvaluatorImplementer { @@ -79,6 +72,8 @@ private TypeSpec type() { builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.superclass(ABSTRACT_CONVERT_FUNCTION_EVALUATOR); + builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); + builder.addMethod(ctor()); builder.addMethod(name()); builder.addMethod(evalVector()); @@ -92,7 +87,9 @@ private MethodSpec ctor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); builder.addParameter(EXPRESSION_EVALUATOR, "field"); builder.addParameter(SOURCE, "source"); + builder.addParameter(DRIVER_CONTEXT, "driverContext"); builder.addStatement("super($N, $N)", "field", "source"); + builder.addStatement("this.driverContext = driverContext"); return builder.build(); } @@ -121,9 +118,9 @@ private MethodSpec evalVector() { { builder.beginControlFlow("try"); { - var constVectType = constantVectorType(resultType); + var constVectType = blockType(resultType); builder.addStatement( - "return new $T($N, positionCount).asBlock()", + "return driverContext.blockFactory().newConstant$TWith($N, positionCount)", constVectType, evalValueCall("vector", "0", scratchPadName) ); @@ -131,59 +128,34 @@ private MethodSpec evalVector() { builder.nextControlFlow("catch (Exception e)"); { builder.addStatement("registerException(e)"); - builder.addStatement("return Block.constantNullBlock(positionCount)"); + builder.addStatement("return Block.constantNullBlock(positionCount, driverContext.blockFactory())"); } builder.endControlFlow(); } builder.endControlFlow(); - builder.addStatement("$T nullsMask = null", BitSet.class); - if (resultType.equals(BYTES_REF)) { - builder.addStatement( - "$T values = new $T(positionCount, $T.NON_RECYCLING_INSTANCE)", // TODO: see note in MvEvaluatorImplementer - BYTES_REF_ARRAY, - BYTES_REF_ARRAY, - BIG_ARRAYS - ); - } else { - builder.addStatement("$T[] values = new $T[positionCount]", resultType, resultType); - } + ClassName returnBlockType = blockType(resultType); + builder.addStatement( + "$T.Builder builder = $T.newBlockBuilder(positionCount, driverContext.blockFactory())", + returnBlockType, + returnBlockType + ); builder.beginControlFlow("for (int p = 0; p < positionCount; p++)"); { builder.beginControlFlow("try"); { - if (resultType.equals(BYTES_REF)) { - builder.addStatement("values.append($N)", evalValueCall("vector", "p", scratchPadName)); - } else { - builder.addStatement("values[p] = $N", evalValueCall("vector", "p", scratchPadName)); - } + builder.addStatement("builder.$L($N)", appendMethod(resultType), evalValueCall("vector", "p", scratchPadName)); } builder.nextControlFlow("catch (Exception e)"); { builder.addStatement("registerException(e)"); - builder.beginControlFlow("if (nullsMask == null)"); - { - builder.addStatement("nullsMask = new BitSet(positionCount)"); - } - builder.endControlFlow(); - builder.addStatement("nullsMask.set(p)"); - if (resultType.equals(BYTES_REF)) { - builder.addStatement("values.append($T.NULL_VALUE)", 
BYTES_REF_BLOCK); - } + builder.addStatement("builder.appendNull()"); } builder.endControlFlow(); } builder.endControlFlow(); - builder.addStatement( - """ - return nullsMask == null - ? new $T(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new $T(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED)""", - arrayVectorType(resultType), - arrayBlockType(resultType) - ); + builder.addStatement("return builder.build()"); return builder.build(); } @@ -196,7 +168,11 @@ private MethodSpec evalBlock() { builder.addStatement("$T block = ($T) b", blockType, blockType); builder.addStatement("int positionCount = block.getPositionCount()"); TypeName resultBlockType = blockType(resultType); - builder.addStatement("$T.Builder builder = $T.newBlockBuilder(positionCount)", resultBlockType, resultBlockType); + builder.addStatement( + "$T.Builder builder = $T.newBlockBuilder(positionCount, driverContext.blockFactory())", + resultBlockType, + resultBlockType + ); String scratchPadName = null; if (argumentType.equals(BYTES_REF)) { scratchPadName = "scratchPad"; diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 86ae6d3f46789..0e29cc7673fee 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -261,7 +261,7 @@ private MethodSpec eval(String name, boolean nullable) { if (ascendingFunction == null) { return; } - builder.beginControlFlow("if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING)"); + builder.beginControlFlow("if (fieldVal.mvSortedAscending())"); builder.addStatement("return $L(fieldVal)", name.replace("eval", "evalAscending")); builder.endControlFlow(); }, builder -> { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index d8a5e471aaf84..b6e36e698355b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -75,6 +75,7 @@ public BooleanBlock expand() { public static long ramBytesEstimated(boolean[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index a7d397fcfb98e..98b9fdb948bc0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ 
-20,7 +22,7 @@ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanB BooleanBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new boolean[initialSize]; } @@ -192,8 +194,16 @@ public BooleanBlock build() { block = new BooleanArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java index 45c74ee6e06d4..3792e39275f82 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public BooleanVector build() { + finish(); BooleanVector vector; if (valueCount == 1) { vector = new ConstantBooleanVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public BooleanVector build() { } vector = new BooleanArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
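The block comment above captures the contract this refactor establishes: a build() that trips the breaker leaves the builder open, and the caller's close() is what returns the reserved memory. A minimal caller-side sketch of that contract; the newVectorBuilder factory shown is an assumption, only the build()/close() behavior comes from these hunks:

```java
// Builders are now Releasable, so try-with-resources makes breaker accounting
// exception-safe: bytes are refunded whether or not build() succeeds.
static BooleanVector evenMask(BlockFactory blockFactory, int positions) {
    try (BooleanVector.Builder builder = BooleanVector.newVectorBuilder(positions, blockFactory)) {
        for (int p = 0; p < positions; p++) {
            builder.appendBoolean(p % 2 == 0);
        }
        // If the breaker trips inside build(), the exception propagates and the
        // enclosing close() returns everything the builder had reserved.
        return builder.build();
    } // after a successful build() this close() is a no-op
}
```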
+ */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index 30146d4e55c02..1428a1a221fa3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -58,4 +58,12 @@ public BooleanVector build() { } return new BooleanArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index e4ee70cd27a47..db5b5d3fcf804 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -77,6 +77,7 @@ public BytesRefBlock expand() { public static long ramBytesEstimated(BytesRefArray values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override @@ -115,7 +116,7 @@ public void close() { throw new IllegalStateException("can't release already released block [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index c8f5276a99db5..1692bfc59358a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -85,7 +85,7 @@ public String toString() { @Override public void close() { - blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 23c18d2a9ca6e..a60b26667eb79 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -193,19 +193,42 @@ public BytesRefBlockBuilder mvOrdering(Block.MvOrdering 
mvOrdering) { public BytesRefBlock build() { finish(); BytesRefBlock block; + assert estimatedBytes == 0 || firstValueIndexes != null; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); Releasables.closeExpectNoException(values); } else { - estimatedBytes += values.ramBytesUsed(); if (isDense() && singleValued()) { block = new BytesRefArrayVector(values, positionCount, blockFactory).asBlock(); } else { block = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + values = null; + built(); return block; } + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java index f37ffb2a7e28a..5ea9a2b7d0184 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java @@ -54,16 +54,40 @@ protected void growValuesArray(int newSize) { @Override public BytesRefVector build() { + finish(); BytesRefVector vector; + assert estimatedBytes == 0; if (valueCount == 1) { vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed(), false); Releasables.closeExpectNoException(values); } else { - estimatedBytes = values.ramBytesUsed(); vector = new BytesRefArrayVector(values, valueCount, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - values.bigArraysRamBytesUsed(), false); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + values = null; + built(); return vector; } + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index b0de974a85c24..675952a8d6a85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -75,6 +75,7 @@ public DoubleBlock expand() { public static long ramBytesEstimated(double[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index a97f58f3924b1..dca8fe2d0d2e6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlo DoubleBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new double[initialSize]; } @@ -192,8 +194,16 @@ public DoubleBlock build() { block = new DoubleArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
+ */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java index f92ec67aec012..12fa06a944fbc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public DoubleVector build() { + finish(); DoubleVector vector; if (valueCount == 1) { vector = new ConstantDoubleVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public DoubleVector build() { } vector = new DoubleArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index 83992ed71b720..b636d9eb19756 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -58,4 +58,12 @@ public DoubleVector build() { } return new DoubleArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 7a345941df019..4170009b89ab2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -75,6 +75,7 @@ public IntBlock expand() { public static long ramBytesEstimated(int[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 53d379d715c9b..ba96f85e73197 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Bui IntBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new int[initialSize]; } @@ -192,8 +194,16 @@ public IntBlock build() { block = new IntArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java index 0533d5463a4e7..155adfec02b9f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public IntVector build() { + finish(); IntVector vector; if (valueCount == 1) { vector = new ConstantIntVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public IntVector build() { } vector = new IntArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
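Alongside the breaker comment, the build() methods in these hunks gained a finish()/built() pair. From the surrounding code their roles appear to be: finish() seals pending position bookkeeping before the values array is wrapped, and built() marks the builder as spent so a later close() does not refund bytes that now belong to the vector. A hedged sketch of the double-release hazard built() prevents (the factory method is assumed):

```java
// Without the built() marker, the close() at the end of this try block would
// refund bytes that build() already handed to the finished vector.
IntVector vector;
try (IntVector.Builder builder = IntVector.newVectorBuilder(3, blockFactory)) {
    builder.appendInt(1);
    builder.appendInt(2);
    builder.appendInt(3);
    vector = builder.build(); // built() flips the builder to "spent"
}                             // close() sees the spent state: no refund
// the vector now owns the tracked bytes until it is released downstream
```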
+ */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 19303b4024869..03a15fb10a800 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -58,4 +58,12 @@ public IntVector build() { } return new IntArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index 21c6b445cd37d..778ec4294180c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -75,6 +75,7 @@ public LongBlock expand() { public static long ramBytesEstimated(long[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index a378b382ce31e..09d858e7c9b03 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.B LongBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new long[initialSize]; } @@ -192,8 +194,16 @@ public LongBlock build() { block = new LongArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
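The fixed-size vector builders in this stretch get a cheaper version of the same lifecycle: one up-front breaker charge when the array is allocated, and a close() that only refunds when a sentinel says build() never ran. A hedged composite of that lifecycle; only the close() bodies appear verbatim in these hunks, while the constructor and the sentinel assignment in build() are reconstructions:

```java
// Composite sketch of a fixed builder's breaker lifecycle (LongVector flavor).
LongVectorFixedBuilder(int size, BlockFactory blockFactory) {
    blockFactory.adjustBreaker(ramBytesUsed(size), false); // single up-front charge
    this.blockFactory = blockFactory;
    this.values = new long[size];
}

public LongVector build() {
    nextIndex = -1; // assumed sentinel: "already built", ownership moves to the vector
    return new LongArrayVector(values, values.length, blockFactory);
}

@Override
public void close() {
    if (nextIndex >= 0) { // never built: refund the up-front charge
        blockFactory.adjustBreaker(-ramBytesUsed(values.length), false);
    }
}
```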
+ */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java index 6b2e9f1de7d51..3b8bbf4219d00 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public LongVector build() { + finish(); LongVector vector; if (valueCount == 1) { vector = new ConstantLongVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public LongVector build() { } vector = new LongArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java index 5414b7669f588..0960d607d9c0d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java @@ -58,4 +58,12 @@ public LongVector build() { } return new LongArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java index 48aec38b800ce..89c15d9eeab72 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java @@ -44,7 +44,7 @@ public MultivalueDedupeBytesRef(BytesRefBlock block) { * {@link Block} using an adaptive algorithm based on the size of the input list. */ public BytesRefBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount()); @@ -92,7 +92,7 @@ public BytesRefBlock dedupeToBlockAdaptive() { * which picks based on the number of elements at each position. 
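These dedupe entry points now short-circuit on block.mvDeduplicated() instead of the weaker mayHaveMultivaluedFields() test, so blocks that promised uniqueness at build time skip the copy entirely. A producer-side sketch of how a block earns that fast path, assuming the enum constant that replaced ASCENDING is spelled DEDUPLICATED_AND_SORTED_ASCENDING:

```java
// A source that emits sorted, unique multivalues can declare it up front;
// mvDeduplicated() / mvSortedAscending() consumers then take the cheap path.
LongBlock.Builder builder = LongBlock.newBlockBuilder(1);
builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING);
builder.beginPositionEntry();
builder.appendLong(1);
builder.appendLong(5); // ascending and unique, as promised
builder.endPositionEntry();
LongBlock block = builder.build();
assert block.mvDeduplicated() && block.mvSortedAscending();
```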
*/ public BytesRefBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount()); @@ -120,7 +120,7 @@ public BytesRefBlock dedupeToBlockUsingCopyAndSort() { * {@link #dedupeToBlockAdaptive} unless you need the results sorted. */ public BytesRefBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount()); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java index d30292f6fa32c..22f5cef2d57d8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java @@ -41,7 +41,7 @@ public MultivalueDedupeDouble(DoubleBlock block) { * {@link Block} using an adaptive algorithm based on the size of the input list. */ public DoubleBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount()); @@ -89,7 +89,7 @@ public DoubleBlock dedupeToBlockAdaptive() { * which picks based on the number of elements at each position. */ public DoubleBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount()); @@ -117,7 +117,7 @@ public DoubleBlock dedupeToBlockUsingCopyAndSort() { * {@link #dedupeToBlockAdaptive} unless you need the results sorted. */ public DoubleBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount()); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java index cda9308a7e6d2..be6d08cfc39d7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java @@ -40,7 +40,7 @@ public MultivalueDedupeInt(IntBlock block) { * {@link Block} using an adaptive algorithm based on the size of the input list. */ public IntBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); @@ -88,7 +88,7 @@ public IntBlock dedupeToBlockAdaptive() { * which picks based on the number of elements at each position. 
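The same ordering metadata is what the TopN key extractors near the end of this diff test via mvSortedAscending(): on a sorted-ascending block the minimum of a position is its first value and the maximum its last, so no scan is needed. A standalone illustration of that reasoning (the helper method is invented for the example):

```java
// Why sorted-ascending multivalues make min extraction O(1).
static long minValue(LongBlock block, int position) {
    int start = block.getFirstValueIndex(position);
    if (block.mvSortedAscending()) {
        return block.getLong(start); // first value == smallest
    }
    int end = start + block.getValueCount(position);
    long min = Long.MAX_VALUE;
    for (int i = start; i < end; i++) {
        min = Math.min(min, block.getLong(i)); // otherwise scan the position
    }
    return min;
}
```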
*/ public IntBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); @@ -116,7 +116,7 @@ public IntBlock dedupeToBlockUsingCopyAndSort() { * {@link #dedupeToBlockAdaptive} unless you need the results sorted. */ public IntBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java index 0266131fba37c..d4da43f93d503 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java @@ -42,7 +42,7 @@ public MultivalueDedupeLong(LongBlock block) { * {@link Block} using an adaptive algorithm based on the size of the input list. */ public LongBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); @@ -90,7 +90,7 @@ public LongBlock dedupeToBlockAdaptive() { * which picks based on the number of elements at each position. */ public LongBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); @@ -118,7 +118,7 @@ public LongBlock dedupeToBlockUsingCopyAndSort() { * {@link #dedupeToBlockAdaptive} unless you need the results sorted. */ public LongBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java index 0bdc5ac620eb0..40fe7ffdde661 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForBoolean extractorFor(TopNEncoder encoder, boolean ascendin return new KeyExtractorForBoolean.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForBoolean.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBoolean.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForBoolean.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBoolean.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java index accce46f38e30..2f546a46aaeaf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -20,11 +19,11 @@ static KeyExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean ascendi return new KeyExtractorForBytesRef.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForBytesRef.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBytesRef.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForBytesRef.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBytesRef.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java index 2f2968da16d83..5e821b9e24db5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForDouble extractorFor(TopNEncoder encoder, boolean ascending return new KeyExtractorForDouble.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForDouble.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForDouble.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForDouble.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForDouble.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java index 400c43168277d..d4269a622f098 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForInt extractorFor(TopNEncoder encoder, boolean ascending, b return new KeyExtractorForInt.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForInt.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForInt.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForInt.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForInt.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java index 843efdd95471f..6a200efff529d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForLong extractorFor(TopNEncoder encoder, boolean ascending, return new KeyExtractorForLong.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForLong.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForLong.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForLong.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForLong.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java index 50cef0417dd45..3d568adc2b5ea 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; class ResultBuilderForBoolean implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForBoolean implements ResultBuilder { */ private boolean key; - ResultBuilderForBoolean(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForBoolean(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = BooleanBlock.newBlockBuilder(initialSize); + this.builder = BooleanBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public BooleanBlock build() { public String toString() { return "ResultBuilderForBoolean[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java index 55f324c931b67..e37f82f3363a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; class ResultBuilderForBytesRef implements ResultBuilder { @@ -24,10 +25,10 @@ class ResultBuilderForBytesRef implements ResultBuilder { */ private BytesRef key; - ResultBuilderForBytesRef(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForBytesRef(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { this.encoder = encoder; this.inKey = inKey; - this.builder = BytesRefBlock.newBlockBuilder(initialSize); + this.builder = BytesRefBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -67,4 +68,9 @@ public BytesRefBlock build() { public String toString() { return "ResultBuilderForBytesRef[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java index ed4a9b45d90dc..77c976c6e0085 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; class ResultBuilderForDouble implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForDouble implements ResultBuilder { */ private double key; - ResultBuilderForDouble(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForDouble(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = DoubleBlock.newBlockBuilder(initialSize); + this.builder = DoubleBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public DoubleBlock build() { public String toString() { return "ResultBuilderForDouble[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java index 2bcfc81107445..389ed3bc2e3c3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; class ResultBuilderForInt implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForInt implements ResultBuilder { */ private int key; - ResultBuilderForInt(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForInt(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = IntBlock.newBlockBuilder(initialSize); + this.builder = IntBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public IntBlock build() { public String toString() { return "ResultBuilderForInt[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java index 3ada85bf9d5c9..63ee9d35c59e5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; class ResultBuilderForLong implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForLong implements 
ResultBuilder { */ private long key; - ResultBuilderForLong(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForLong(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = LongBlock.newBlockBuilder(initialSize); + this.builder = LongBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public LongBlock build() { public String toString() { return "ResultBuilderForLong[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index 3a52beb9c2d87..f3ca16869898f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -82,7 +82,8 @@ public DoubleBlock[] getKeys() { } BitSet nulls = new BitSet(1); nulls.set(0); - return new DoubleBlock[] { new DoubleArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.ASCENDING) }; + return new DoubleBlock[] { + new DoubleArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } final int size = Math.toIntExact(longHash.size()); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 4fcd9735f6158..08b2cd15aa53e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -78,7 +78,7 @@ public IntBlock[] getKeys() { } BitSet nulls = new BitSet(1); nulls.set(0); - return new IntBlock[] { new IntArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.ASCENDING) }; + return new IntBlock[] { new IntArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } final int size = Math.toIntExact(longHash.size()); final int[] keys = new int[size]; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 5e5b46ae6eda1..00e93db9cec00 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -82,7 +82,8 @@ public LongBlock[] getKeys() { } BitSet nulls = new BitSet(1); nulls.set(0); - return new LongBlock[] { new LongArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.ASCENDING) }; + return new LongBlock[] { + new LongArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } final int size = Math.toIntExact(longHash.size()); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java index 31f65e9b70053..7ecaddf2092fa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java @@ -22,8 +22,6 @@ import org.elasticsearch.compute.operator.BatchEncoder; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.MultivalueDedupe; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; import java.util.Arrays; import java.util.List; @@ -51,19 +49,20 @@ * } */ final class PackedValuesBlockHash extends BlockHash { - private static final Logger logger = LogManager.getLogger(PackedValuesBlockHash.class); static final int DEFAULT_BATCH_SIZE = Math.toIntExact(ByteSizeValue.ofKb(10).getBytes()); - private final List groups; private final int emitBatchSize; private final BytesRefHash bytesRefHash; private final int nullTrackingBytes; + private final BytesRef scratch = new BytesRef(); + private final BytesRefBuilder bytes = new BytesRefBuilder(); + private final Group[] groups; - PackedValuesBlockHash(List groups, BigArrays bigArrays, int emitBatchSize) { - this.groups = groups; + PackedValuesBlockHash(List specs, BigArrays bigArrays, int emitBatchSize) { + this.groups = specs.stream().map(Group::new).toArray(Group[]::new); this.emitBatchSize = emitBatchSize; this.bytesRefHash = new BytesRefHash(1, bigArrays); - this.nullTrackingBytes = groups.size() / 8 + 1; + this.nullTrackingBytes = (groups.length + 7) / 8; } @Override @@ -75,23 +74,28 @@ void add(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) new AddWork(page, addInput, batchSize).add(); } + private static class Group { + final HashAggregationOperator.GroupSpec spec; + BatchEncoder encoder; + int positionOffset; + int valueOffset; + int loopedIndex; + int valueCount; + int bytesStart; + + Group(HashAggregationOperator.GroupSpec spec) { + this.spec = spec; + } + } + class AddWork extends LongLongBlockHash.AbstractAddBlock { - final BatchEncoder[] encoders = new BatchEncoder[groups.size()]; - final int[] positionOffsets = new int[groups.size()]; - final int[] valueOffsets = new int[groups.size()]; - final BytesRef[] scratches = new BytesRef[groups.size()]; - final BytesRefBuilder bytes = new BytesRefBuilder(); final int positionCount; - int position; - int count; - int bufferedGroup; AddWork(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) { super(emitBatchSize, addInput); - for (int g = 0; g < groups.size(); g++) { - encoders[g] = MultivalueDedupe.batchEncoder(page.getBlock(groups.get(g).channel()), batchSize); - scratches[g] = new BytesRef(); + for (Group group : groups) { + group.encoder = MultivalueDedupe.batchEncoder(page.getBlock(group.spec.channel()), batchSize); } bytes.grow(nullTrackingBytes); this.positionCount = page.getPositionCount(); @@ -104,91 +108,86 @@ class AddWork extends LongLongBlockHash.AbstractAddBlock { */ void add() { for (position = 0; position < positionCount; position++) { - if (logger.isTraceEnabled()) { - logger.trace("position {}", position); - } // Make sure all encoders have encoded the current position and the offsets are queued to it's start - for (int g = 0; g < encoders.length; g++) { - positionOffsets[g]++; - while (positionOffsets[g] >= 
encoders[g].positionCount()) { - encoders[g].encodeNextBatch(); - positionOffsets[g] = 0; - valueOffsets[g] = 0; + boolean singleEntry = true; + for (Group g : groups) { + var encoder = g.encoder; + g.positionOffset++; + while (g.positionOffset >= encoder.positionCount()) { + encoder.encodeNextBatch(); + g.positionOffset = 0; + g.valueOffset = 0; } + g.valueCount = encoder.valueCount(g.positionOffset); + singleEntry &= (g.valueCount == 1); } - - count = 0; Arrays.fill(bytes.bytes(), 0, nullTrackingBytes, (byte) 0); bytes.setLength(nullTrackingBytes); - addPosition(0); - switch (count) { - case 0 -> throw new IllegalStateException("didn't find any values"); - case 1 -> { - ords.appendInt(bufferedGroup); - addedValue(position); - } - default -> ords.endPositionEntry(); - } - for (int g = 0; g < encoders.length; g++) { - valueOffsets[g] += encoders[g].valueCount(positionOffsets[g]); + if (singleEntry) { + addSingleEntry(); + } else { + addMultipleEntries(); } } emitOrds(); } - private void addPosition(int g) { - if (g == groups.size()) { - addBytes(); - return; - } - int start = bytes.length(); - int count = encoders[g].valueCount(positionOffsets[g]); - assert count > 0; - int valueOffset = valueOffsets[g]; - BytesRef v = encoders[g].read(valueOffset++, scratches[g]); - if (logger.isTraceEnabled()) { - logger.trace("\t".repeat(g + 1) + v); - } - if (v.length == 0) { - assert count == 1 : "null value in non-singleton list"; - int nullByte = g / 8; - int nullShift = g % 8; - bytes.bytes()[nullByte] |= (byte) (1 << nullShift); - } - bytes.setLength(start); - bytes.append(v); - addPosition(g + 1); // TODO stack overflow protection - for (int i = 1; i < count; i++) { - v = encoders[g].read(valueOffset++, scratches[g]); - if (logger.isTraceEnabled()) { - logger.trace("\t".repeat(g + 1) + v); + private void addSingleEntry() { + for (int g = 0; g < groups.length; g++) { + Group group = groups[g]; + BytesRef v = group.encoder.read(group.valueOffset++, scratch); + if (v.length == 0) { + int nullByte = g / 8; + int nullShift = g % 8; + bytes.bytes()[nullByte] |= (byte) (1 << nullShift); + } else { + bytes.append(v); } - assert v.length > 0 : "null value after the first position"; - bytes.setLength(start); - bytes.append(v); - addPosition(g + 1); } + int ord = Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))); + ords.appendInt(ord); + addedValue(position); } - private void addBytes() { - int group = Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))); - switch (count) { - case 0 -> bufferedGroup = group; - case 1 -> { - ords.beginPositionEntry(); - ords.appendInt(bufferedGroup); - addedValueInMultivaluePosition(position); - ords.appendInt(group); - addedValueInMultivaluePosition(position); + private void addMultipleEntries() { + ords.beginPositionEntry(); + int g = 0; + outer: for (;;) { + for (; g < groups.length; g++) { + Group group = groups[g]; + group.bytesStart = bytes.length(); + BytesRef v = group.encoder.read(group.valueOffset + group.loopedIndex, scratch); + ++group.loopedIndex; + if (v.length == 0) { + assert group.valueCount == 1 : "null value in non-singleton list"; + int nullByte = g / 8; + int nullShift = g % 8; + bytes.bytes()[nullByte] |= (byte) (1 << nullShift); + } else { + bytes.append(v); + } } - default -> { - ords.appendInt(group); - addedValueInMultivaluePosition(position); + // emit ords + int ord = Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))); + ords.appendInt(ord); + addedValueInMultivaluePosition(position); + + // rewind + Group group 
= groups[--g]; + bytes.setLength(group.bytesStart); + while (group.loopedIndex == group.valueCount) { + group.loopedIndex = 0; + if (g == 0) { + break outer; + } else { + group = groups[--g]; + bytes.setLength(group.bytesStart); + } } } - count++; - if (logger.isTraceEnabled()) { - logger.trace("{} = {}", bytes.get(), group); + ords.endPositionEntry(); + for (Group group : groups) { + group.valueOffset += group.valueCount; } } } @@ -196,16 +195,16 @@ private void addBytes() { @Override public Block[] getKeys() { int size = Math.toIntExact(bytesRefHash.size()); - BatchEncoder.Decoder[] decoders = new BatchEncoder.Decoder[groups.size()]; - Block.Builder[] builders = new Block.Builder[groups.size()]; + BatchEncoder.Decoder[] decoders = new BatchEncoder.Decoder[groups.length]; + Block.Builder[] builders = new Block.Builder[groups.length]; for (int g = 0; g < builders.length; g++) { - ElementType elementType = groups.get(g).elementType(); + ElementType elementType = groups[g].spec.elementType(); decoders[g] = BatchEncoder.decoder(elementType); builders[g] = elementType.newBlockBuilder(size); } - BytesRef values[] = new BytesRef[(int) Math.min(100, bytesRefHash.size())]; - BytesRef nulls[] = new BytesRef[values.length]; + BytesRef[] values = new BytesRef[(int) Math.min(100, bytesRefHash.size())]; + BytesRef[] nulls = new BytesRef[values.length]; for (int offset = 0; offset < values.length; offset++) { values[offset] = new BytesRef(); nulls[offset] = new BytesRef(); @@ -231,7 +230,7 @@ public Block[] getKeys() { readKeys(decoders, builders, nulls, values, offset); } - Block[] keyBlocks = new Block[groups.size()]; + Block[] keyBlocks = new Block[groups.length]; for (int g = 0; g < keyBlocks.length; g++) { keyBlocks[g] = builders[g].build(); } @@ -271,13 +270,12 @@ public String toString() { StringBuilder b = new StringBuilder(); b.append("PackedValuesBlockHash{groups=["); boolean first = true; - for (HashAggregationOperator.GroupSpec spec : groups) { - if (first) { - first = false; - } else { + for (int i = 0; i < groups.length; i++) { + if (i > 0) { b.append(", "); } - b.append(spec.channel()).append(':').append(spec.elementType()); + Group group = groups[i]; + b.append(group.spec.channel()).append(':').append(group.spec.elementType()); } b.append("], entries=").append(bytesRefHash.size()); b.append(", size=").append(ByteSizeValue.ofBytes(bytesRefHash.ramBytesUsed())); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index a6ad5d1299543..3d06eba398513 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -33,6 +33,8 @@ abstract class AbstractBlockBuilder implements Block.Builder { /** The number of bytes currently estimated with the breaker. */ protected long estimatedBytes; + private boolean closed = false; + protected AbstractBlockBuilder(BlockFactory blockFactory) { this.blockFactory = blockFactory; } @@ -101,7 +103,14 @@ protected final void updatePosition() { } } + /** + * Called during implementations of {@link Block.Builder#build} as a first step + * to check if the block is still open and to finish the last position. 
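One small but easy-to-miss fix in the PackedValuesBlockHash rewrite above: the null-tracking header is now sized with a ceiling division, (groups.length + 7) / 8, rather than groups.size() / 8 + 1. A quick sketch of why that matters:

```java
// One null-tracking bit per group, rounded up to whole bytes. The old formula
// allocated an extra, never-used byte whenever the group count was an exact
// multiple of 8.
for (int n : new int[] { 1, 7, 8, 9, 16 }) {
    int oldBytes = n / 8 + 1;   // 1, 1, 2, 2, 3
    int newBytes = (n + 7) / 8; // 1, 1, 1, 2, 2
    System.out.println(n + " groups: old=" + oldBytes + " new=" + newBytes);
}
```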
+ */ protected final void finish() { + if (closed) { + throw new IllegalStateException("already closed"); + } if (positionEntryIsOpen) { endPositionEntry(); } @@ -110,6 +119,16 @@ protected final void finish() { } } + /** + * Called during implementations of {@link Block.Builder#build} as a last step + * to mark the Builder as closed and make sure that further closes don't double + * free memory. + */ + protected final void built() { + closed = true; + estimatedBytes = 0; + } + protected abstract void growValuesArray(int newSize); /** The number of bytes used to represent each value element. */ @@ -125,6 +144,20 @@ protected final void ensureCapacity() { growValuesArray(newSize); } + @Override + public final void close() { + if (closed == false) { + closed = true; + adjustBreaker(-estimatedBytes); + extraClose(); + } + } + + /** + * Called when first {@link #close() closed}. + */ + protected void extraClose() {} + static int calculateNewArraySize(int currentSize) { // trivially, grows array by 50% return currentSize + (currentSize >> 1); @@ -133,6 +166,7 @@ static int calculateNewArraySize(int currentSize) { protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes, false); estimatedBytes += deltaBytes; + assert estimatedBytes >= 0; } private void setFirstValue(int position, int value) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java index d83d26cf33831..4a019db5e03c0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java @@ -52,7 +52,7 @@ public boolean mayHaveMultivaluedFields() { @Override public final MvOrdering mvOrdering() { - return MvOrdering.UNORDERED; + return MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java index 49ce276074735..274e88cd8d8b6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java @@ -7,9 +7,14 @@ package org.elasticsearch.compute.data; -abstract class AbstractVectorBuilder { +abstract class AbstractVectorBuilder implements Vector.Builder { protected int valueCount; + /** + * Has this builder been closed already? + */ + private boolean closed = false; + protected final BlockFactory blockFactory; /** The number of bytes currently estimated with the breaker. */ @@ -46,4 +51,38 @@ protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes, false); estimatedBytes += deltaBytes; } + + /** + * Called during implementations of {@link Block.Builder#build} as a first step + * to check if the block is still open and to finish the last position. + */ + protected final void finish() { + if (closed) { + throw new IllegalStateException("already closed"); + } + } + + /** + * Called during implementations of {@link Block.Builder#build} as a last step + * to mark the Builder as closed and make sure that further closes don't double + * free memory. 
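Taken together, finish(), built(), and the closed flag give builders a simple lifecycle: open, then either built or closed, with the breaker reimbursed exactly once. Since Block.Builder now extends Releasable, callers can lean on try-with-resources. A minimal sketch under those assumptions (imports and the helper itself are illustrative, not from the PR):

```java
static LongBlock buildLongs(long[] values, BlockFactory blockFactory) {
    // If appendLong() trips the circuit breaker mid-loop, the implicit close()
    // returns every byte the builder charged. After a successful build(),
    // built() marks the builder closed, so the implicit close() is a no-op
    // rather than a double free.
    try (LongBlock.Builder builder = LongBlock.newBlockBuilder(values.length, blockFactory)) {
        for (long v : values) {
            builder.appendLong(v);
        }
        return builder.build();
    }
}
```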
+ */ + protected final void built() { + closed = true; + estimatedBytes = 0; + } + + @Override + public final void close() { + if (closed == false) { + closed = true; + adjustBreaker(-estimatedBytes); + extraClose(); + } + } + + /** + * Called when first {@link #close() closed}. + */ + protected void extraClose() {} } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 5b10a3a510de0..9c05d6d0ddfb8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -102,12 +102,20 @@ public interface Block extends Accountable, NamedWriteable, Releasable { /** * How are multivalued fields ordered? - *
Note that there isn't a {@code DESCENDING} because we don't have - * anything that makes descending fields.
+ * Some operators can optimize when multivalued fields are de-duplicated or sorted ascending. */ enum MvOrdering { - ASCENDING, - UNORDERED; + UNORDERED(false, false), + DEDUPLICATED_UNORDERED(true, false), + DEDUPLICATED_AND_SORTED_ASCENDING(true, true); + + private final boolean deduplicated; + private final boolean sortedAscending; + + MvOrdering(boolean deduplicated, boolean sortedAscending) { + this.deduplicated = deduplicated; + this.sortedAscending = sortedAscending; + } } /** @@ -115,6 +123,20 @@ enum MvOrdering { */ MvOrdering mvOrdering(); + /** + * Are multivalued fields de-duplicated in each position? + */ + default boolean mvDeduplicated() { + return mayHaveMultivaluedFields() == false || mvOrdering().deduplicated; + } + + /** + * Are multivalued fields sorted ascending in each position? + */ + default boolean mvSortedAscending() { + return mayHaveMultivaluedFields() == false || mvOrdering().sortedAscending; + } + /** * Expand multivalued fields into one row per value. Returns the * block if there aren't any multivalued fields to expand. @@ -133,7 +155,11 @@ static Block constantNullBlock(int positions, BlockFactory blockFactory) { return blockFactory.newConstantNullBlock(positions); } - interface Builder { + /** + * Builds {@link Block}s. Typically, you use one of its direct superinterfaces like {@link IntBlock.Builder}. + * This is {@link Releasable} and should be released after building the block or if building the block fails. + */ + interface Builder extends Releasable { /** * Appends a null value to the block. @@ -168,7 +194,7 @@ interface Builder { /** * How are multivalued fields ordered? This defaults to {@link Block.MvOrdering#UNORDERED} - * but when you set it to {@link Block.MvOrdering#ASCENDING} some operators can optimize + * but when you set it to {@link Block.MvOrdering#DEDUPLICATED_AND_SORTED_ASCENDING} some operators can optimize * themselves. This is a promise that is never checked. If you set this * to anything other than {@link Block.MvOrdering#UNORDERED} be sure the values are in * that order or other operators will make mistakes. The actual ordering isn't checked
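The enum now encodes two independent promises, and the default methods expose them without callers enumerating constants. A sketch of the resulting truth table (the booleans mirror the constructor arguments above) and the invariant it implies; `block` is an assumed Block in scope:

```java
// ordering                             mvDeduplicated()   mvSortedAscending()
// UNORDERED                            false              false
// DEDUPLICATED_UNORDERED               true               false
// DEDUPLICATED_AND_SORTED_ASCENDING    true               true
//
// A block that cannot hold multivalued positions reports true for both,
// which is why AbstractVectorBlock below returns
// DEDUPLICATED_AND_SORTED_ASCENDING.
boolean dedup = block.mvDeduplicated();
boolean sorted = block.mvSortedAscending();
assert sorted == false || dedup; // sorted-ascending implies deduplicated
```

The TopN key extractors earlier in the diff rely on the same decomposition: mvSortedAscending() now gates the MinForAscending/MaxForAscending paths that previously required mvOrdering() == ASCENDING.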
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 2afea228a4a78..63de604c49b18 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -305,7 +305,7 @@ public BytesRefBlock newBytesRefArrayBlock( MvOrdering mvOrdering ) { var b = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); - adjustBreaker(b.ramBytesUsed() - values.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed(), true); return b; } @@ -315,7 +315,7 @@ public BytesRefVector.Builder newBytesRefVectorBuilder(int estimatedSize) { public BytesRefVector newBytesRefArrayVector(BytesRefArray values, int positionCount) { var b = new BytesRefArrayVector(values, positionCount, this); - adjustBreaker(b.ramBytesUsed() - values.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed(), true); return b; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 2ebbb771b5df1..a41ea0383368d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -8,10 +8,13 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Randomness; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Random; import java.util.function.Consumer; import static org.elasticsearch.common.lucene.BytesRefs.toBytesRef; @@ -68,8 +71,13 @@ public static Block[] fromListRow(List row, int blockSize) { if (object instanceof List listVal) { BuilderWrapper wrapper = wrapperFor(fromJava(listVal.get(0).getClass()), blockSize); wrapper.accept(listVal); - if (isAscending(listVal)) { - wrapper.builder.mvOrdering(Block.MvOrdering.ASCENDING); + Random random = Randomness.get(); + if (isDeduplicated(listVal) && random.nextBoolean()) { + if (isAscending(listVal) && random.nextBoolean()) { + wrapper.builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } else { + wrapper.builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_UNORDERED); + } } blocks[i] = wrapper.builder.build(); } else { @@ -100,6 +108,14 @@ private static boolean isAscending(List values) { return true; } + /** + * Detect blocks with deduplicated fields. This is *mostly* useful for + * exercising the specialized deduplicated implementations. 
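The BlockFactory hunks above swap values.ramBytesUsed() for values.bigArraysRamBytesUsed() when computing the breaker delta. The intent, as I read it: only the BytesRefArray's big-array storage was charged to the breaker as the array grew, so only that portion must be excluded from the new charge. A hedged sketch with illustrative numbers:

```java
// Suppose the wrapping block retains 10 KiB in total, of which 8 KiB is the
// BytesRefArray storage that BigArrays already charged while it grew.
long blockBytes = 10 * 1024;    // b.ramBytesUsed(): everything the block retains
long alreadyCharged = 8 * 1024; // values.bigArraysRamBytesUsed()
long delta = blockBytes - alreadyCharged; // 2 KiB of wrapper overhead left to charge
System.out.println("charge breaker with " + delta + " bytes");
// Subtracting values.ramBytesUsed() instead would also remove the array's own
// shallow object overhead, which was never charged, leaving part of the
// block's memory unaccounted for.
```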
+ */ + private static boolean isDeduplicated(List values) { + return new HashSet<>(values).size() == values.size(); + } + public static Block[] fromList(List> list) { var size = list.size(); if (size == 0) { @@ -210,7 +226,7 @@ public static Object toJavaObject(Block block, int position) { private static Object valueAtOffset(Block block, int offset) { return switch (block.elementType()) { case BOOLEAN -> ((BooleanBlock) block).getBoolean(offset); - case BYTES_REF -> ((BytesRefBlock) block).getBytesRef(offset, new BytesRef()); + case BYTES_REF -> BytesRef.deepCopyOf(((BytesRefBlock) block).getBytesRef(offset, new BytesRef())); case DOUBLE -> ((DoubleBlock) block).getDouble(offset); case INT -> ((IntBlock) block).getInt(offset); case LONG -> ((LongBlock) block).getLong(offset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 2da9cfeba09f0..01994af1cfc96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -136,6 +136,11 @@ public void close() { static class Builder implements Block.Builder { private int positionCount; + /** + * Has this builder been closed already? + */ + private boolean closed = false; + @Override public Builder appendNull() { positionCount++; @@ -174,7 +179,16 @@ public Block.Builder mvOrdering(MvOrdering mvOrdering) { @Override public Block build() { + if (closed) { + throw new IllegalStateException("already closed"); + } + close(); return new ConstantNullBlock(positionCount); } + + @Override + public void close() { + closed = true; + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index b21a956980f6a..6bcf913ce6240 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -82,8 +82,8 @@ public void close() { /** * A builder the for {@link DocBlock}. */ - public static Builder newBlockBuilder(int estimatedSize) { - return new Builder(estimatedSize); + public static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return new Builder(estimatedSize, blockFactory); } public static class Builder implements Block.Builder { @@ -91,10 +91,10 @@ public static class Builder implements Block.Builder { private final IntVector.Builder segments; private final IntVector.Builder docs; - private Builder(int estimatedSize) { - shards = IntVector.newVectorBuilder(estimatedSize); - segments = IntVector.newVectorBuilder(estimatedSize); - docs = IntVector.newVectorBuilder(estimatedSize); + private Builder(int estimatedSize, BlockFactory blockFactory) { + shards = IntVector.newVectorBuilder(estimatedSize, blockFactory); + segments = IntVector.newVectorBuilder(estimatedSize, blockFactory); + docs = IntVector.newVectorBuilder(estimatedSize, blockFactory); } public Builder appendShard(int shard) { @@ -153,5 +153,10 @@ public DocBlock build() { // Pass null for singleSegmentNonDecreasing so we calculate it when we first need it. 
return new DocVector(shards.build(), segments.build(), docs.build(), null).asBlock(); } + + @Override + public void close() { + Releasables.closeExpectNoException(shards, segments, docs); + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index 0c85d433018e0..4467766a9e0ef 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -9,8 +9,6 @@ import org.apache.lucene.util.BytesRef; -import java.util.function.IntFunction; - /** * The type of elements in {@link Block} and {@link Vector} */ @@ -22,7 +20,7 @@ public enum ElementType { /** * Blocks containing only null values. */ - NULL(estimatedSize -> new ConstantNullBlock.Builder()), + NULL((estimatedSize, blockFactory) -> new ConstantNullBlock.Builder()), BYTES_REF(BytesRefBlock::newBlockBuilder), @@ -34,19 +32,32 @@ public enum ElementType { /** * Intermediate blocks which don't support retrieving elements. */ - UNKNOWN(estimatedSize -> { throw new UnsupportedOperationException("can't build null blocks"); }); + UNKNOWN((estimatedSize, blockFactory) -> { throw new UnsupportedOperationException("can't build null blocks"); }); + + interface BuilderSupplier { + Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory); + } - private final IntFunction builder; + private final BuilderSupplier builder; - ElementType(IntFunction builder) { + ElementType(BuilderSupplier builder) { this.builder = builder; } /** * Create a new {@link Block.Builder} for blocks of this type. + * @deprecated use {@link #newBlockBuilder(int, BlockFactory)} */ + @Deprecated public Block.Builder newBlockBuilder(int estimatedSize) { - return builder.apply(estimatedSize); + return builder.newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + /** + * Create a new {@link Block.Builder} for blocks of this type. + */ + public Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return builder.newBlockBuilder(estimatedSize, blockFactory); } public static ElementType fromJava(Class type) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java index 873565592dfaf..a4c89422213b1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -69,9 +68,10 @@ private Page(boolean copyBlocks, int positionCount, Block[] blocks) { // assert assertPositionCount(blocks); this.positionCount = positionCount; this.blocks = copyBlocks ? 
blocks.clone() : blocks; - if (Assertions.ENABLED) { - for (Block b : blocks) { - assert b.getPositionCount() == positionCount : "expected positionCount=" + positionCount + " but was " + b; + for (Block b : blocks) { + assert b.getPositionCount() == positionCount : "expected positionCount=" + positionCount + " but was " + b; + if (b.isReleased()) { + throw new IllegalArgumentException("can't build page out of released blocks but [" + b + "] was released"); + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 171bdbd62f4d0..c9ecf1aa9e399 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -50,7 +50,11 @@ public interface Vector extends Accountable, Releasable { /** The block factory associated with this vector. */ BlockFactory blockFactory(); - interface Builder { + /** + * Builds {@link Vector}s. Typically, you use one of its direct superinterfaces like {@link IntVector.Builder}. + * This is {@link Releasable} and should be released after building the vector or if building the vector fails. + */ + interface Builder extends Releasable { /** * Builds the block. This method can be called multiple times. */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 10ff868c09806..ddb0eced039be 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -93,6 +93,7 @@ $endif$ public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override @@ -137,7 +138,7 @@ $endif$ } released = true; $if(BytesRef)$ - blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); $else$ blockFactory.adjustBreaker(-ramBytesUsed(), true); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index b6a8714f882ee..3e6ccc2286675 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -110,7 +110,7 @@ $endif$ $if(BytesRef)$ @Override public void close() { - blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } $endif$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 
4d43f25577cc5..0ccfc45f18664 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -14,6 +14,8 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.core.Releasables; $else$ +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; $endif$ @@ -41,7 +43,7 @@ $else$ $Type$BlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new $type$[initialSize]; } $endif$ @@ -246,27 +248,68 @@ $endif$ public $Type$Block build() { finish(); $Type$Block block; - if (hasNonNullValue && positionCount == 1 && valueCount == 1) { $if(BytesRef)$ + assert estimatedBytes == 0 || firstValueIndexes != null; + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); Releasables.closeExpectNoException(values); } else { - estimatedBytes += values.ramBytesUsed(); + if (isDense() && singleValued()) { + block = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); + } else { + block = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); + } + values = null; $else$ + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new Constant$Type$Vector(values[0], 1, blockFactory).asBlock(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } -$endif$ if (isDense() && singleValued()) { block = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); } else { block = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); +$endif$ + built(); return block; } +$if(BytesRef)$ + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } +$endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st index b813120b42e43..1e243c49b5d82 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st @@ -83,24 +83,62 @@ $endif$ @Override public $Type$Vector build() { + finish(); $Type$Vector vector; - if (valueCount == 1) { $if(BytesRef)$ + assert estimatedBytes == 0; + if (valueCount == 1) { vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed(), false); Releasables.closeExpectNoException(values); } else { - estimatedBytes = values.ramBytesUsed(); + vector = new $Type$ArrayVector(values, valueCount, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - values.bigArraysRamBytesUsed(), false); + } + values = null; $else$ + if (valueCount == 1) { vector = new Constant$Type$Vector(values[0], 1, blockFactory); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } -$endif$ vector = new $Type$ArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
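Both templated build() methods end with the same reconciliation step: the builder tracked a running estimate while appending, and build() settles the difference against the breaker in one adjustment, deliberately allowing that adjustment to trip. A sketch of the arithmetic with assumed numbers:

```java
// While appending, the builder charged a running estimate to the breaker.
long estimatedBytes = 4 * 1024;            // charged so far
long actualBytes = 5 * 1024;               // block.ramBytesUsed() once built
long delta = actualBytes - estimatedBytes; // 1 KiB still owed
// adjustBreaker(delta, false): per the comment in the diff, passing false
// lets the breaker throw here. That is safe because the builder is still
// open at this point, so the caller's close() hands every charged byte back.
System.out.println("settling " + delta + " bytes with the breaker");
```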
+ */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); +$endif$ + built(); return vector; } +$if(BytesRef)$ + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } +$endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index 86bc6b0a095d6..d732c85db7467 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -58,4 +58,12 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { } return new $Type$ArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java index 4290075b05ae8..28a9359497393 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java @@ -142,7 +142,7 @@ private static class LongSingletonValuesReader extends BlockDocValuesReader { @Override public LongBlock.Builder builder(int positionCount) { - return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -197,7 +197,7 @@ private static class LongValuesReader extends BlockDocValuesReader { @Override public LongBlock.Builder builder(int positionCount) { - return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -259,7 +259,7 @@ private static class IntSingletonValuesReader extends BlockDocValuesReader { @Override public IntBlock.Builder builder(int positionCount) { - return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -314,7 +314,7 @@ private static class IntValuesReader extends BlockDocValuesReader { @Override public IntBlock.Builder builder(int positionCount) { - return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -378,7 +378,7 @@ private static class DoubleSingletonValuesReader extends BlockDocValuesReader { @Override public DoubleBlock.Builder builder(int positionCount) { - return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -435,7 +435,7 @@ private static class DoubleValuesReader extends BlockDocValuesReader { @Override public DoubleBlock.Builder builder(int 
positionCount) { - return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -497,7 +497,7 @@ private static class BytesValuesReader extends BlockDocValuesReader { @Override public BytesRefBlock.Builder builder(int positionCount) { - return BytesRefBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return BytesRefBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -558,7 +558,7 @@ private static class BooleanSingletonValuesReader extends BlockDocValuesReader { @Override public BooleanBlock.Builder builder(int positionCount) { - return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -613,7 +613,7 @@ private static class BooleanValuesReader extends BlockDocValuesReader { @Override public BooleanBlock.Builder builder(int positionCount) { - return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 281693a487255..1a1604406892c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -181,7 +181,13 @@ private SubscribableListener runSingleLoopIteration() { if (op.isFinished() == false && nextOp.needsInput()) { Page page = op.getOutput(); - if (page != null && page.getPositionCount() != 0) { + if (page == null) { + // No result, just move to the next iteration + } else if (page.getPositionCount() == 0) { + // Empty result, release any memory it holds immediately and move to the next iteration + page.releaseBlocks(); + } else { + // Non-empty result from the previous operation, move it to the next operation nextOp.addInput(page); movedPage = true; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st index 337b095ebe8d0..bd3e290a3625c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st @@ -70,7 +70,7 @@ $endif$ * {@link Block} using an adaptive algorithm based on the size of the input list. */ public $Type$Block dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } $Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount()); @@ -122,7 +122,7 @@ $endif$ * which picks based on the number of elements at each position. 
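The Driver change above splits what used to be a single null-or-non-empty check into three cases. Restated as a sketch, with the rationale as comments:

```java
Page page = op.getOutput();
if (page == null) {
    // nothing produced this iteration; revisit this operator pair later
} else if (page.getPositionCount() == 0) {
    // an empty page still references blocks, and blocks are now
    // breaker-accounted, so release them instead of just dropping the page
    page.releaseBlocks();
} else {
    // real output: hand it to the downstream operator
    nextOp.addInput(page);
    movedPage = true;
}
```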
*/ public $Type$Block dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } $Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount()); @@ -154,7 +154,7 @@ $endif$ * {@link #dedupeToBlockAdaptive} unless you need the results sorted. */ public $Type$Block dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { + if (block.mvDeduplicated()) { return block; } $Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount()); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java index d52f25e9d8306..0fb6ec6f63d96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java @@ -36,10 +36,6 @@ public record ExchangeSinkOperatorFactory(Supplier<ExchangeSink> exchangeSinks, Function<Page, Page> transformer) implements SinkOperatorFactory { - public ExchangeSinkOperatorFactory(Supplier<ExchangeSink> exchangeSinks) { - this(exchangeSinks, Function.identity()); - } - @Override public SinkOperator get(DriverContext driverContext) { return new ExchangeSinkOperator(exchangeSinks.get(), transformer); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java index b8a41a3ee343d..bd2027cade78f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java @@ -9,12 +9,14 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.core.Releasable; /** * Builds {@link Block}s from keys and values encoded into {@link BytesRef}s.
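* A rough lifecycle sketch (hypothetical caller; {@code blockFactory}, {@code rows} and
* {@code positions} are stand-ins supplied by the surrounding operator):
* <pre>{@code
* try (ResultBuilder rb = ResultBuilder.resultBuilderFor(
*         blockFactory, ElementType.LONG, TopNEncoder.DEFAULT_UNSORTABLE, false, positions)) {
*     for (BytesRef row : rows) {
*         rb.decodeValue(row);  // keys, if any, are fed to decodeKey before this
*     }
*     Block block = rb.build(); // building does not close rb; the try-with-resources does
* }
* }</pre>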
*/ -interface ResultBuilder { +interface ResultBuilder extends Releasable { /** * Called for each sort key before {@link #decodeValue} to consume the sort key and * store the value of the key for {@link #decodeValue} can use it to reconstruct @@ -36,15 +38,21 @@ interface ResultBuilder { */ Block build(); - static ResultBuilder resultBuilderFor(ElementType elementType, TopNEncoder encoder, boolean inKey, int positions) { + static ResultBuilder resultBuilderFor( + BlockFactory blockFactory, + ElementType elementType, + TopNEncoder encoder, + boolean inKey, + int positions + ) { return switch (elementType) { - case BOOLEAN -> new ResultBuilderForBoolean(encoder, inKey, positions); - case BYTES_REF -> new ResultBuilderForBytesRef(encoder, inKey, positions); - case INT -> new ResultBuilderForInt(encoder, inKey, positions); - case LONG -> new ResultBuilderForLong(encoder, inKey, positions); - case DOUBLE -> new ResultBuilderForDouble(encoder, inKey, positions); - case NULL -> new ResultBuilderForNull(); - case DOC -> new ResultBuilderForDoc(positions); + case BOOLEAN -> new ResultBuilderForBoolean(blockFactory, encoder, inKey, positions); + case BYTES_REF -> new ResultBuilderForBytesRef(blockFactory, encoder, inKey, positions); + case INT -> new ResultBuilderForInt(blockFactory, encoder, inKey, positions); + case LONG -> new ResultBuilderForLong(blockFactory, encoder, inKey, positions); + case DOUBLE -> new ResultBuilderForDouble(blockFactory, encoder, inKey, positions); + case NULL -> new ResultBuilderForNull(blockFactory); + case DOC -> new ResultBuilderForDoc(blockFactory, positions); default -> { assert false : "Result builder for [" + elementType + "]"; throw new UnsupportedOperationException("Result builder for [" + elementType + "]"); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java index 166d5be83b474..7fb507ffdbead 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java @@ -13,12 +13,15 @@ import org.elasticsearch.compute.data.DocVector; class ResultBuilderForDoc implements ResultBuilder { + private final BlockFactory blockFactory; private final int[] shards; private final int[] segments; private final int[] docs; private int position; - ResultBuilderForDoc(int positions) { + ResultBuilderForDoc(BlockFactory blockFactory, int positions) { + // TODO use fixed length builders + this.blockFactory = blockFactory; this.shards = new int[positions]; this.segments = new int[positions]; this.docs = new int[positions]; @@ -40,9 +43,9 @@ public void decodeValue(BytesRef values) { @Override public Block build() { return new DocVector( - BlockFactory.getNonBreakingInstance().newIntArrayVector(shards, position), - BlockFactory.getNonBreakingInstance().newIntArrayVector(segments, position), - BlockFactory.getNonBreakingInstance().newIntArrayVector(docs, position), + blockFactory.newIntArrayVector(shards, position), + blockFactory.newIntArrayVector(segments, position), + blockFactory.newIntArrayVector(docs, position), null ).asBlock(); } @@ -51,4 +54,9 @@ public Block build() { public String toString() { return "ValueExtractorForDoc"; } + + @Override + public void close() { + // TODO memory accounting + } } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java index 05b9ba2a07658..a45f16fc30910 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java @@ -9,10 +9,16 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; public class ResultBuilderForNull implements ResultBuilder { + private final BlockFactory blockFactory; private int positions; + public ResultBuilderForNull(BlockFactory blockFactory) { + this.blockFactory = blockFactory; + } + @Override public void decodeKey(BytesRef keys) { throw new AssertionError("somehow got a value for a null key"); @@ -29,11 +35,16 @@ public void decodeValue(BytesRef values) { @Override public Block build() { - return Block.constantNullBlock(positions); + return Block.constantNullBlock(positions, blockFactory); } @Override public String toString() { return "ValueExtractorForNull"; } + + @Override + public void close() { + // Nothing to close + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 86b3a18992db4..9657d60376763 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -12,7 +12,9 @@ import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -205,7 +207,15 @@ public record TopNOperatorFactory( @Override public TopNOperator get(DriverContext driverContext) { - return new TopNOperator(driverContext.breaker(), topCount, elementTypes, encoders, sortOrders, maxPageSize); + return new TopNOperator( + driverContext.blockFactory(), + driverContext.breaker(), + topCount, + elementTypes, + encoders, + sortOrders, + maxPageSize + ); } @Override @@ -222,6 +232,7 @@ public String describe() { } } + private final BlockFactory blockFactory; private final CircuitBreaker breaker; private final Queue inputQueue; @@ -231,9 +242,11 @@ public String describe() { private final List<TopNEncoder> encoders; private final List<SortOrder> sortOrders; + private Row spare; private Iterator<Page> output; public TopNOperator( + BlockFactory blockFactory, CircuitBreaker breaker, int topCount, List<ElementType> elementTypes, @@ -241,6 +254,7 @@ public TopNOperator( List<SortOrder> sortOrders, int maxPageSize ) { + this.blockFactory = blockFactory; this.breaker = breaker; this.maxPageSize = maxPageSize; this.elementTypes = elementTypes; @@ -301,21 +315,20 @@ public void addInput(Page page) { * and must be closed. That happens either because it's overflow from the * inputQueue or because we hit an allocation failure while building it.
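* (Concretely: {@code inputQueue.insertWithOverflow(spare)} either admits the spare and
* hands back an evicted row, or rejects and returns the spare itself; whichever row comes
* back becomes the next spare, so a full queue recycles one row per position instead of
* allocating a fresh one.)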
*/ - Row row = null; try { for (int i = 0; i < page.getPositionCount(); i++) { - if (row == null) { - row = new Row(breaker); + if (spare == null) { + spare = new Row(breaker); } else { - row.keys.clear(); - row.orderByCompositeKeyAscending.clear(); - row.values.clear(); + spare.keys.clear(); + spare.orderByCompositeKeyAscending.clear(); + spare.values.clear(); } - rowFiller.row(i, row); - row = inputQueue.insertWithOverflow(row); + rowFiller.row(i, spare); + spare = inputQueue.insertWithOverflow(spare); } } finally { - Releasables.close(row); + Releasables.close(() -> page.releaseBlocks()); } } @@ -327,18 +340,24 @@ public void finish() { } private Iterator<Page> toPages() { + if (spare != null) { + // Remove the spare, we're never going to use it again. + spare.close(); + spare = null; + } if (inputQueue.size() == 0) { return Collections.emptyIterator(); } List<Row> list = new ArrayList<>(inputQueue.size()); + List<Page> result = new ArrayList<>(); + ResultBuilder[] builders = null; + boolean success = false; try { while (inputQueue.size() > 0) { list.add(inputQueue.pop()); } Collections.reverse(list); - List<Page> result = new ArrayList<>(); - ResultBuilder[] builders = null; int p = 0; int size = 0; for (int i = 0; i < list.size(); i++) { @@ -347,6 +366,7 @@ private Iterator<Page> toPages() { builders = new ResultBuilder[elementTypes.size()]; for (int b = 0; b < builders.length; b++) { builders[b] = ResultBuilder.resultBuilderFor( + blockFactory, elementTypes.get(b), encoders.get(b).toUnsortable(), channelInKey(sortOrders, b), @@ -386,14 +406,22 @@ private Iterator<Page> toPages() { p++; if (p == size) { result.add(new Page(Arrays.stream(builders).map(ResultBuilder::build).toArray(Block[]::new))); + Releasables.closeExpectNoException(builders); builders = null; } - } assert builders == null; + success = true; return result.iterator(); } finally { - Releasables.closeExpectNoException(() -> Releasables.close(list)); + if (success == false) { + List<Releasable> close = new ArrayList<>(list); + for (Page p : result) { + close.add(p::releaseBlocks); + } + Collections.addAll(close, builders); + Releasables.closeExpectNoException(Releasables.wrap(close)); + } } } @@ -422,10 +450,15 @@ public Page getOutput() { @Override public void close() { /* - * If everything went well we'll have drained inputQueue to this'll - * be a noop. But if inputQueue + * If we close before calling finish then spare and inputQueue will be live rows + * that need closing. If we close after calling finish then the output iterator + * will contain pages of results that have yet to be returned. */ - Releasables.closeExpectNoException(() -> Releasables.close(inputQueue)); + Releasables.closeExpectNoException( + spare, + inputQueue == null ? null : Releasables.wrap(inputQueue), + output == null ?
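/* After finish(), pages still queued in the output iterator own live blocks; mapping each one to page::releaseBlocks lets close() release them. */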
null : Releasables.wrap(() -> Iterators.map(output, p -> p::releaseBlocks)) + ); } private static long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(TopNOperator.class) + RamUsageEstimator diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st index 9ec03270da093..dbe0b23af93bb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st @@ -10,7 +10,6 @@ package org.elasticsearch.compute.operator.topn; $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.$Type$Block; import org.elasticsearch.compute.data.$Type$Vector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -22,11 +21,11 @@ abstract class KeyExtractorFor$Type$ implements KeyExtractor { return new KeyExtractorFor$Type$.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorFor$Type$.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorFor$Type$.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorFor$Type$.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorFor$Type$.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st index 5f9a35bd0ebd3..ebe62398c8504 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.$Type$Block; class ResultBuilderFor$Type$ implements ResultBuilder { @@ -26,14 +27,14 @@ $endif$ */ private $type$ key; - ResultBuilderFor$Type$(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderFor$Type$(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { $if(BytesRef)$ this.encoder = encoder; $else$ assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); $endif$ this.inKey = inKey; - this.builder = $Type$Block.newBlockBuilder(initialSize); + this.builder = $Type$Block.newBlockBuilder(initialSize, blockFactory); } @Override @@ -81,4 +82,9 @@ $endif$ public String toString() { return "ResultBuilderFor$Type$[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 04a966b399870..bdf696f460060 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ 
-287,7 +287,7 @@ public void testLimitOperator() { try ( var driver = new Driver( driverContext, - new SequenceLongBlockSourceOperator(values, 100), + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of((new LimitOperator.Factory(limit)).get(driverContext)), new PageConsumerOperator(page -> { LongBlock block = page.getBlock(0); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index a4b6c8b965962..22325039af124 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -91,8 +91,8 @@ protected final ByteSizeValue smallEnoughToCircuitBreak() { public final void testIgnoresNulls() { int end = between(1_000, 100_000); List<Page> results = new ArrayList<>(); - List<Page> input = CannedSourceOperator.collectPages(simpleInput(end)); DriverContext driverContext = driverContext(); + List<Page> input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), end)); try ( Driver d = new Driver( @@ -111,7 +111,9 @@ public final void testIgnoresNulls() { public final void testMultivalued() { int end = between(1_000, 100_000); DriverContext driverContext = driverContext(); - List<Page> input = CannedSourceOperator.collectPages(new PositionMergingSourceOperator(simpleInput(end))); + List<Page> input = CannedSourceOperator.collectPages( + new PositionMergingSourceOperator(simpleInput(driverContext.blockFactory(), end)) + ); assertSimpleOutput(input, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator())); } @@ -119,7 +121,7 @@ public final void testMultivaluedWithNulls() { int end = between(1_000, 100_000); DriverContext driverContext = driverContext(); List<Page> input = CannedSourceOperator.collectPages( - new NullInsertingSourceOperator(new PositionMergingSourceOperator(simpleInput(end))) + new NullInsertingSourceOperator(new PositionMergingSourceOperator(simpleInput(driverContext.blockFactory(), end))) ); assertSimpleOutput(input, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java index 11241020a6709..623de7fdd1fff 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,8 +22,8 @@ public class CountAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLong())); + protected
SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLong())); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java index 74cd88feed3f4..febbb4d4a0615 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceBooleanBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 +22,7 @@ public class CountDistinctBooleanAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceBooleanBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> randomBoolean()).toList()); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java index eab1b9cb2d8de..7360b101bf79d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongBooleanTupleBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongBooleanTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomBoolean())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java index 69ccc0a04c0f9..c495a6b9f196b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import 
org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.BytesRefBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -23,7 +24,7 @@ public class CountDistinctBytesRefAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, Math.min(Integer.MAX_VALUE, Integer.MAX_VALUE / size)); return new BytesRefBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> new BytesRef(String.valueOf(between(-max, max)))).toList() diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java index 919d06af430fd..eadbba9f91880 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongBytesRefTupleBlockSourceOperator; @@ -35,7 +36,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongBytesRefTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), new BytesRef(String.valueOf(between(1, 10000))))) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java index c0678441cdc74..ccfe7b426ebca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -23,7 +24,7 @@ public class CountDistinctDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java index 5a928f12d33b7..0c4d89da09b99 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -34,7 +35,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomDoubleBetween(0, 100, true))) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 3699a87431937..b67e4cdee7e97 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -29,7 +29,7 @@ public class CountDistinctIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, Math.min(Integer.MAX_VALUE, Integer.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(-max, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java index f2a46e9f4c3af..678024c19d391 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -34,7 +35,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), between(0, 
10000)))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 556f9d0ccc462..704b5c649f744 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -30,9 +30,9 @@ public class CountDistinctLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java index a5959471b8e15..4282adaba595e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,9 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomLongBetween(0, 100_000))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java index 54a35fcc19cb2..945c68711bb4e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -33,9 +34,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { if (randomBoolean()) { return 
new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index eab6eb30261bd..4ae58fd8c6333 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -147,7 +147,9 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { public final void testNullGroupsAndValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List<Page> input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(simpleInput(end))); + List<Page> input = CannedSourceOperator.collectPages( + new NullInsertingSourceOperator(simpleInput(driverContext.blockFactory(), end)) + ); List<Page> results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -155,7 +157,7 @@ public final void testNullGroupsAndValues() { public final void testNullGroups() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List<Page> input = CannedSourceOperator.collectPages(nullGroups(simpleInput(end))); + List<Page> input = CannedSourceOperator.collectPages(nullGroups(simpleInput(driverContext.blockFactory(), end))); List<Page> results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -184,7 +186,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl public final void testNullValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List<Page> input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); + List<Page> input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end))); List<Page> results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -192,7 +194,7 @@ public final void testNullValues() { public final void testNullValuesInitialIntermediateFinal() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List<Page> input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); + List<Page> input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end))); List<Page> results = drive( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -220,7 +222,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl public final void testMultivalued() { DriverContext driverContext = driverContext(); int end = between(1_000, 100_000); - List<Page> input = CannedSourceOperator.collectPages(mergeValues(simpleInput(end))); + List<Page> input = CannedSourceOperator.collectPages(mergeValues(simpleInput(driverContext.blockFactory(), end))); List<Page> results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -228,7 +230,9 @@ public final void testMultivalued() { public final void testMulitvaluedNullGroupsAndValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List<Page> input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(mergeValues(simpleInput(end)))); + List<Page> input = CannedSourceOperator.collectPages( + new NullInsertingSourceOperator(mergeValues(simpleInput(driverContext.blockFactory(), end))) + ); List<Page> results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -236,7 +240,7 @@ public final void testMulitvaluedNullGroupsAndValues() { public final void testMulitvaluedNullGroup() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List<Page> input = CannedSourceOperator.collectPages(nullGroups(mergeValues(simpleInput(end)))); + List<Page> input = CannedSourceOperator.collectPages(nullGroups(mergeValues(simpleInput(driverContext.blockFactory(), end)))); List<Page> results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -244,7 +248,7 @@ public final void testMulitvaluedNullGroup() { public final void testMulitvaluedNullValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List<Page> input = CannedSourceOperator.collectPages(nullValues(mergeValues(simpleInput(end)))); + List<Page> input = CannedSourceOperator.collectPages(nullValues(mergeValues(simpleInput(driverContext.blockFactory(), end)))); List<Page> results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -295,12 +299,13 @@ private void assertNullOnly(List<Operator> operators) { public final void testNullSome() { DriverContext driverContext = driverContext(); - assertNullSome(List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); + assertNullSome(driverContext, List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); } public final void testNullSomeInitialFinal() { DriverContext driverContext = driverContext(); assertNullSome( + driverContext, List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.FINAL).get(driverContext) @@ -311,6 +316,7 @@ public final void testNullSomeInitialFinal() { public final void testNullSomeInitialIntermediateFinal() { DriverContext driverContext = driverContext(); assertNullSome( + driverContext, List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INTERMEDIATE).get(driverContext), @@ -322,8 +328,8 @@ public final void testNullSomeInitialIntermediateFinal() { /** * Run the agg on some data where one group is always null.
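* (Roughly: it collects pages from {@code simpleInput(driverContext.blockFactory(), 1000)},
* picks a random group seen in that data, nulls out every value in that group, and then
* asserts the aggregation result for that group is null.)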
*/ - private void assertNullSome(List<Operator> operators) { - List<Page> inputData = CannedSourceOperator.collectPages(simpleInput(1000)); + private void assertNullSome(DriverContext driverContext, List<Operator> operators) { + List<Page> inputData = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 1000)); SeenGroups seenGroups = seenGroups(inputData); long nullGroup = randomFrom(seenGroups.nonNull); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java index b67220b4909b7..cfda483d029f6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 +22,7 @@ public class MaxDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java index 3750aec95f3a7..9a2c8bc17685d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -24,7 +25,7 @@ public class MaxDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java index 72cfa06222b50..e76021b883120 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import
org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,7 +21,7 @@ public class MaxIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceIntBlockSourceOperator(IntStream.range(0, size).map(l -> randomInt())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java index 9ffee498eeba2..313e10be39855 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt()))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java index 4e84f2e672b97..a51aa98f7a5a8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,9 +21,9 @@ public class MaxLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java index e284f2a6103d1..a1f44e128c2e1 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,11 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong()))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java index 74bda421a545e..1c14a8e7855ce 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,7 +23,7 @@ public class MedianAbsoluteDeviationDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List<Double> values = Arrays.asList(1.2, 1.25, 2.0, 2.0, 4.3, 6.0, 9.0); Randomness.shuffle(values); return new SequenceDoubleBlockSourceOperator(values); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java index 6751486453f30..06ddb2a734f8c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { double[][] samples = new double[][] { { 1.2, 1.25, 2.0, 2.0, 4.3, 6.0, 9.0 }, { 0.1, 1.5, 2.0, 3.0, 4.0, 7.5, 100.0 }, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java index 20506cc5c8f93..40e422b6efc26 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,7 +23,7 @@ public class MedianAbsoluteDeviationIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List<Integer> values = Arrays.asList(12, 125, 20, 20, 43, 60, 90); Randomness.shuffle(values); return new SequenceIntBlockSourceOperator(values); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java index 20f62c67a16cc..2f00764f6fe51 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { int[][] samples = new int[][] { { 12, 125, 20, 20, 43, 60, 90 }, { 1, 15, 20, 30, 40, 75, 1000 }, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java index d80415f83daa2..465bb5800bbb6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,10 +23,10 @@ public class MedianAbsoluteDeviationLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List<Long> values = Arrays.asList(12L, 125L, 20L, 20L, 43L, 60L, 90L); Randomness.shuffle(values); - return new SequenceLongBlockSourceOperator(values); + return new SequenceLongBlockSourceOperator(blockFactory, values); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java index c3cebad8e0e0b..2c6bfc1204591 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { long[][] samples = new long[][] { { 12, 125, 20, 20, 43, 60, 90 }, { 1, 15, 20, 30, 40, 75, 1000 }, @@ -42,7 +43,7 @@ protected SourceOperator simpleInput(int end) { values.add(Tuple.tuple((long) i, v)); } } - return new TupleBlockSourceOperator(values); + return new TupleBlockSourceOperator(blockFactory, values); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java index 622302d549fd0..7e0b7241cf258 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import
org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 +22,7 @@ public class MinDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java index 12c63e354547a..7c4141f4a7ad1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -23,7 +24,7 @@ public class MinDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java index 2dc0e893875ab..dc1ab1398fb90 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,7 +21,7 @@ public class MinIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceIntBlockSourceOperator(IntStream.range(0, size).map(l -> randomInt())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java index 4ffbe9b1396d3..55cfc2d124e5f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 
@@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt()))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java index 25a420237893e..91feb141ac74b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,9 +21,9 @@ public class MinLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java index 311e7e41ed9ac..02dda3fe3c236 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,11 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong()))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java index 96e61d4782022..61f26cd0209b3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -41,7 +42,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java index c0d6595e088eb..9495e78ec47ca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -42,7 +43,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java index c34a01e608d1a..37d153f7bcae6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -40,7 +41,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + 
protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(0, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java index a018fba96e897..948e156e52c85 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -42,7 +43,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new LongIntBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), between(-1, max))) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java index cf0b18840d91e..eb32dac18ea80 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -40,9 +41,9 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, 1_000_000); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(0, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(0, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java index 609526532b72e..6360be8595ff8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -42,9 +43,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size / 5); return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLongBetween(-0, max))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 767f9a2d5c25b..d3dc262419008 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; @@ -28,7 +29,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java index 03a7269b84690..8b86d99653282 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -23,7 +24,7 @@ public class SumDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index e6fccf2d46f61..736386fae3dec 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -27,7 +27,7 @@ public class SumIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(-max, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java index 71666024c819d..0b8678a0e3f05 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -32,7 +33,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new LongIntBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), between(-max, max))) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index ae5aaa5b21965..e9523c5583cd4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -27,9 +27,9 @@ public class SumLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override @@ -53,7 +53,7 @@ public void testOverflowFails() { try ( Driver d = new Driver( driverContext, - new SequenceLongBlockSourceOperator(LongStream.of(Long.MAX_VALUE - 1, 2)), + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.of(Long.MAX_VALUE - 1, 2)), List.of(simple(nonBreakingBigArrays()).get(driverContext)), new 
PageConsumerOperator(page -> fail("shouldn't have made it this far")), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java index e0dc918b515d6..827dc06a4f542 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -32,9 +33,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size / 5); return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLongBetween(-max, max))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index cf7fbbea1c775..f99ded96a9984 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -906,10 +906,12 @@ void assertZeroPositionsAndRelease(Vector vector) { void releaseAndAssertBreaker(Block... blocks) { assertThat(breaker.getUsed(), greaterThan(0L)); + Page[] pages = Arrays.stream(blocks).map(Page::new).toArray(Page[]::new); Releasables.closeExpectNoException(blocks); Arrays.stream(blocks).forEach(block -> assertThat(block.isReleased(), is(true))); Arrays.stream(blocks).forEach(BasicBlockTests::assertCannotDoubleRelease); - Arrays.stream(blocks).forEach(BasicBlockTests::assertCannotReadFromPage); + Arrays.stream(pages).forEach(BasicBlockTests::assertCannotReadFromPage); + Arrays.stream(blocks).forEach(BasicBlockTests::assertCannotAddToPage); assertThat(breaker.getUsed(), is(0L)); } @@ -924,12 +926,16 @@ static void assertCannotDoubleRelease(Block block) { assertThat(ex.getMessage(), containsString("can't release already released block")); } - static void assertCannotReadFromPage(Block block) { - Page page = new Page(block); + static void assertCannotReadFromPage(Page page) { var e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); assertThat(e.getMessage(), containsString("can't read released block")); } + static void assertCannotAddToPage(Block block) { + var e = expectThrows(IllegalArgumentException.class, () -> new Page(block)); + assertThat(e.getMessage(), containsString("can't build page out of released blocks but")); + } + static int randomPosition(int positionCount) { return positionCount == 1 ? 
0 : randomIntBetween(0, positionCount - 1); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java index de552d242afa2..2179e68c47832 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java @@ -7,47 +7,48 @@ package org.elasticsearch.compute.data; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.List; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class BlockBuilderTests extends ESTestCase { - - public void testAllNullsInt() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(IntBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); + @ParametersFactory + public static List<Object[]> params() { + List<Object[]> params = new ArrayList<>(); + for (ElementType elementType : ElementType.values()) { + if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + continue; + } + params.add(new Object[] { elementType }); } + return params; } - public void testAllNullsLong() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(LongBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); - } - } + private final ElementType elementType; - public void testAllNullsDouble() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(DoubleBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); - } + public BlockBuilderTests(ElementType elementType) { + this.elementType = elementType; } - public void testAllNullsBytesRef() { + public void testAllNulls() { for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(BytesRefBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(0), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(100), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(1000), numEntries); +
testAllNullsImpl(elementType.newBlockBuilder(randomIntBetween(0, 100)), numEntries); } } @@ -65,4 +66,95 @@ private void testAllNullsImpl(Block.Builder builder, int numEntries) { static int randomPosition(int positionCount) { return positionCount == 1 ? 0 : randomIntBetween(0, positionCount - 1); } + + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + elementType.newBlockBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBuildSmallSingleValued() { + testBuild(between(1, 100), false, 1); + } + + public void testBuildHugeSingleValued() { + testBuild(between(1_000, 50_000), false, 1); + } + + public void testBuildSmallSingleValuedNullable() { + testBuild(between(1, 100), true, 1); + } + + public void testBuildHugeSingleValuedNullable() { + testBuild(between(1_000, 50_000), true, 1); + } + + public void testBuildSmallMultiValued() { + testBuild(between(1, 100), false, 3); + } + + public void testBuildHugeMultiValued() { + testBuild(between(1_000, 50_000), false, 3); + } + + public void testBuildSmallMultiValuedNullable() { + testBuild(between(1, 100), true, 3); + } + + public void testBuildHugeMultiValuedNullable() { + testBuild(between(1_000, 50_000), true, 3); + } + + public void testBuildSingle() { + testBuild(1, false, 1); + } + + private void testBuild(int size, boolean nullable, int maxValueCount) { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Block.Builder builder = elementType.newBlockBuilder(randomBoolean() ? size : 1, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, nullable, 1, maxValueCount, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testDoubleBuild() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testCranky() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + try { + try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + 
assertThat(built, equalTo(random.block())); + } + } + // If we made it this far cranky didn't fail us! + } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index a524221dd50d7..7be79e73b5d9d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -49,11 +49,29 @@ public static BlockFactory blockFactory(ByteSizeValue size) { @ParametersFactory public static List<Object[]> params() { - List<Supplier<BlockFactory>> l = List.of(() -> { - CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); - return BlockFactory.getInstance(breaker, bigArrays); - }, BlockFactory::getGlobalInstance); + List<Supplier<BlockFactory>> l = List.of(new Supplier<>() { + @Override + public BlockFactory get() { + CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); + return BlockFactory.getInstance(breaker, bigArrays); + } + + @Override + public String toString() { + return "1gb"; + } + }, new Supplier<>() { + @Override + public BlockFactory get() { + return BlockFactory.getGlobalInstance(); + } + + @Override + public String toString() { + return "global"; + } + }); return l.stream().map(s -> new Object[] { s }).toList(); } @@ -555,13 +573,16 @@ static Block.MvOrdering randomOrdering() { } void releaseAndAssertBreaker(T data) { + Page page = data instanceof Block block ?
new Page(block) : null; assertThat(breaker.getUsed(), greaterThan(0L)); Releasables.closeExpectNoException(data); if (data instanceof Block block) { assertThat(block.isReleased(), is(true)); - Page page = new Page(block); - var e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); + Exception e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); assertThat(e.getMessage(), containsString("can't read released block")); + + e = expectThrows(IllegalArgumentException.class, () -> new Page(block)); + assertThat(e.getMessage(), containsString("can't build page out of released blocks")); } assertThat(breaker.getUsed(), is(0L)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java index a5637128705ca..dd61c8f6478d3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java @@ -77,6 +77,7 @@ public static void readInto(List<List<Object>> values, Page page) { for (int i = 0; i < page.getBlockCount(); i++) { readInto(values.get(i), page.getBlock(i)); } + page.releaseBlocks(); } public static void readInto(List<Object> values, Block block) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java index 0eb9beec2e7f9..ee654497c1ec3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java @@ -18,7 +18,6 @@ import java.util.Arrays; import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class BytesRefBlockEqualityTests extends ESTestCase { @@ -332,10 +331,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - BytesRefBlock block1 = builder.build(); - BytesRefBlock block2 = builder.build(); + BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + BytesRefBlock block1 = builder1.build(); + BytesRefBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -365,15 +368,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder3 = BytesRefBlock.newBlockBuilder(grow ?
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendBytesRef(new BytesRef(Integer.toHexString(randomInt())))); + for (int i = 0; i < values; i++) { + BytesRef value = new BytesRef(Integer.toHexString(randomInt())); + builder1.appendBytesRef(value); + builder2.appendBytesRef(value); + builder3.appendBytesRef(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - BytesRefBlock block1 = builder.build(); - BytesRefBlock block2 = builder.build(); - BytesRefBlock block3 = builder.build(); + BytesRefBlock block1 = builder1.build(); + BytesRefBlock block2 = builder2.build(); + BytesRefBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index d8258ab28a078..465dc95a15ea4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; @@ -98,47 +99,55 @@ public void testRandomShardSegmentDocMap() { } private void assertShardSegmentDocMap(int[][] data, int[][] expected) { - DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length); - for (int r = 0; r < data.length; r++) { - builder.appendShard(data[r][0]); - builder.appendSegment(data[r][1]); - builder.appendDoc(data[r][2]); + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length, blockFactory)) { + for (int r = 0; r < data.length; r++) { + builder.appendShard(data[r][0]); + builder.appendSegment(data[r][1]); + builder.appendDoc(data[r][2]); + } + try (DocVector docVector = builder.build().asVector()) { + int[] forwards = docVector.shardSegmentDocMapForwards(); + + int[][] result = new int[docVector.getPositionCount()][]; + for (int p = 0; p < result.length; p++) { + result[p] = new int[] { + docVector.shards().getInt(forwards[p]), + docVector.segments().getInt(forwards[p]), + docVector.docs().getInt(forwards[p]) }; + } + assertThat(result, equalTo(expected)); + + int[] backwards = docVector.shardSegmentDocMapBackwards(); + for (int p = 0; p < result.length; p++) { + result[p] = new int[] { + docVector.shards().getInt(backwards[forwards[p]]), + docVector.segments().getInt(backwards[forwards[p]]), + docVector.docs().getInt(backwards[forwards[p]]) }; + } + + assertThat(result, equalTo(data)); + } } - DocVector docVector = builder.build().asVector(); - int[] forwards = docVector.shardSegmentDocMapForwards(); - - int[][] result = new int[docVector.getPositionCount()][]; - for (int p = 0; p < result.length; p++) { - result[p] = new int[] { - docVector.shards().getInt(forwards[p]), - docVector.segments().getInt(forwards[p]), - docVector.docs().getInt(forwards[p]) }; - } - assertThat(result, equalTo(expected)); - - int[] 
backwards = docVector.shardSegmentDocMapBackwards(); - for (int p = 0; p < result.length; p++) { - result[p] = new int[] { - docVector.shards().getInt(backwards[forwards[p]]), - docVector.segments().getInt(backwards[forwards[p]]), - docVector.docs().getInt(backwards[forwards[p]]) }; - } - - assertThat(result, equalTo(data)); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } public void testCannotDoubleRelease() { var block = new DocVector(IntVector.range(0, 2), IntBlock.newConstantBlockWith(0, 2).asVector(), IntVector.range(0, 2), null) .asBlock(); assertThat(block.isReleased(), is(false)); + Page page = new Page(block); + Releasables.closeExpectNoException(block); assertThat(block.isReleased(), is(true)); - var ex = expectThrows(IllegalStateException.class, () -> block.close()); - assertThat(ex.getMessage(), containsString("can't release already released block")); + Exception e = expectThrows(IllegalStateException.class, () -> block.close()); + assertThat(e.getMessage(), containsString("can't release already released block")); - Page page = new Page(block); - var e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); + e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); assertThat(e.getMessage(), containsString("can't read released block")); + + e = expectThrows(IllegalArgumentException.class, () -> new Page(block)); + assertThat(e.getMessage(), containsString("can't build page out of released blocks")); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java index 2abbcc0b989f1..7dda97f52834e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class DoubleBlockEqualityTests extends ESTestCase { @@ -224,10 +223,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = DoubleBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - DoubleBlock block1 = builder.build(); - DoubleBlock block2 = builder.build(); + DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + DoubleBlock block1 = builder1.build(); + DoubleBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -248,15 +251,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder3 = DoubleBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendDouble(randomDouble())); + for (int i = 0; i < values; i++) { + double value = randomDouble(); + builder1.appendDouble(value); + builder2.appendDouble(value); + builder3.appendDouble(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - DoubleBlock block1 = builder.build(); - DoubleBlock block2 = builder.build(); - DoubleBlock block3 = builder.build(); + DoubleBlock block1 = builder1.build(); + DoubleBlock block2 = builder2.build(); + DoubleBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java index c4e19106d4368..40c84324f13d2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class IntBlockEqualityTests extends ESTestCase { @@ -185,10 +184,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - IntBlock block1 = builder.build(); - IntBlock block2 = builder.build(); + IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + IntBlock block1 = builder1.build(); + IntBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -210,15 +213,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder3 = IntBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendInt(randomInt())); + for (int i = 0; i < values; i++) { + int value = randomInt(); + builder1.appendInt(value); + builder2.appendInt(value); + builder3.appendInt(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - IntBlock block1 = builder.build(); - IntBlock block2 = builder.build(); - IntBlock block3 = builder.build(); + IntBlock block1 = builder1.build(); + IntBlock block2 = builder2.build(); + IntBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java index 3d08b2a96d635..a24b4a4dd6fa6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class LongBlockEqualityTests extends ESTestCase { @@ -191,10 +190,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = LongBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - LongBlock block1 = builder.build(); - LongBlock block2 = builder.build(); + LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + LongBlock block1 = builder1.build(); + LongBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -216,15 +219,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder3 = LongBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); - int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendLong(randomLong())); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); + int valueCount = randomIntBetween(1, 16); + for (int i = 0; i < valueCount; i++) { + long value = randomLong(); + builder1.appendLong(value); + builder2.appendLong(value); + builder3.appendLong(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - LongBlock block1 = builder.build(); - LongBlock block2 = builder.build(); - LongBlock block3 = builder.build(); + LongBlock block1 = builder1.build(); + LongBlock block2 = builder2.build(); + LongBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index 4684da93a661a..d9377a490368d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -139,6 +139,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public IntBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestLongBlockBuilder extends TestBlockBuilder { @@ -195,6 +200,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public LongBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestDoubleBlockBuilder extends TestBlockBuilder { @@ -251,6 +261,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public DoubleBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestBytesRefBlockBuilder extends TestBlockBuilder { @@ -307,6 +322,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public BytesRefBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestBooleanBlockBuilder extends TestBlockBuilder { @@ -366,5 +386,10 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public BooleanBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java new file mode 100644 index 0000000000000..656d79070f217 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class VectorBuilderTests extends ESTestCase { + @ParametersFactory + public static List<Object[]> params() { + List<Object[]> params = new ArrayList<>(); + for (ElementType elementType : ElementType.values()) { + if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + continue; + } + params.add(new Object[] { elementType }); + } + return params; + } + + private final ElementType elementType; + + public VectorBuilderTests(ElementType elementType) { + this.elementType = elementType; + } + + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + vectorBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBuildSmall() { + testBuild(between(1, 100)); + } + + public void testBuildHuge() { + testBuild(between(1_000, 50_000)); + } + + public void testBuildSingle() { + testBuild(1); + } + + private void testBuild(int size) { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Vector.Builder builder = vectorBuilder(randomBoolean() ?
size : 1, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testDoubleBuild() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testCranky() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + try { + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + } + // If we made it this far cranky didn't fail us! 
+ } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + private Vector.Builder vectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return switch (elementType) { + case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case BOOLEAN -> BooleanVector.newVectorBuilder(estimatedSize, blockFactory); + case BYTES_REF -> BytesRefVector.newVectorBuilder(estimatedSize, blockFactory); + case DOUBLE -> DoubleVector.newVectorBuilder(estimatedSize, blockFactory); + case INT -> IntVector.newVectorBuilder(estimatedSize, blockFactory); + case LONG -> LongVector.newVectorBuilder(estimatedSize, blockFactory); + }; + } + + private void fill(Vector.Builder builder, Vector from) { + switch (elementType) { + case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case BOOLEAN -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((BooleanVector.Builder) builder).appendBoolean(((BooleanVector) from).getBoolean(p)); + } + } + case BYTES_REF -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((BytesRefVector.Builder) builder).appendBytesRef(((BytesRefVector) from).getBytesRef(p, new BytesRef())); + } + } + case DOUBLE -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((DoubleVector.Builder) builder).appendDouble(((DoubleVector) from).getDouble(p)); + } + } + case INT -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((IntVector.Builder) builder).appendInt(((IntVector) from).getInt(p)); + } + } + case LONG -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((LongVector.Builder) builder).appendLong(((LongVector) from).getLong(p)); + } + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java index 9fa9f7e32c654..df67ee2e7822a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java @@ -45,6 +45,12 @@ public VectorFixedBuilderTests(ElementType elementType) { this.elementType = elementType; } + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + vectorBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + public void testBuildSmall() { testBuild(between(1, 100)); } @@ -59,25 +65,32 @@ public void testBuildSingle() { private void testBuild(int size) { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - Vector.Builder builder = vectorBuilder(size, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); - fill(builder, random.block().asVector()); - try (Vector built = builder.build()) { - assertThat(built, equalTo(random.block().asVector())); - assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + try (Vector.Builder builder = vectorBuilder(size, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, 
equalTo(random.block().asVector())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } public void testDoubleBuild() { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - Vector.Builder builder = vectorBuilder(10, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); - fill(builder, random.block().asVector()); - try (Vector built = builder.build()) { - assertThat(built, equalTo(random.block().asVector())); + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); } - Exception e = expectThrows(IllegalStateException.class, builder::build); - assertThat(e.getMessage(), equalTo("already closed")); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } public void testCranky() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 64edcaa43d89b..4c0e33e5cfb82 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; @@ -115,7 +116,7 @@ static Operator.OperatorFactory factory(IndexReader reader, ValuesSourceType vsT } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { // The test wants more than one segment. We shoot for about 10. 
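// (Worked example, for illustration only: size = 10_000 gives commitEvery = 1_000, so the
// writer commits roughly every 1_000 docs and the reader sees about 10 segments;
// Math.max(1, ...) covers size < 10, where size / 10 would round down to 0.)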
int commitEvery = Math.max(1, size / 10); try ( @@ -198,21 +199,35 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testLoadAll() { - loadSimpleAndAssert(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024)))); + DriverContext driverContext = driverContext(); + loadSimpleAndAssert( + driverContext, + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ); } public void testLoadAllInOnePage() { + DriverContext driverContext = driverContext(); loadSimpleAndAssert( - List.of(CannedSourceOperator.mergePages(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024))))) + driverContext, + List.of( + CannedSourceOperator.mergePages( + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ) + ) ); } public void testEmpty() { - loadSimpleAndAssert(CannedSourceOperator.collectPages(simpleInput(0))); + DriverContext driverContext = driverContext(); + loadSimpleAndAssert(driverContext, CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 0))); } public void testLoadAllInOnePageShuffled() { - Page source = CannedSourceOperator.mergePages(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024)))); + DriverContext driverContext = driverContext(); + Page source = CannedSourceOperator.mergePages( + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ); List shuffleList = new ArrayList<>(); IntStream.range(0, source.getPositionCount()).forEach(i -> shuffleList.add(i)); Randomness.shuffle(shuffleList); @@ -222,11 +237,10 @@ public void testLoadAllInOnePageShuffled() { shuffledBlocks[b] = source.getBlock(b).filter(shuffleArray); } source = new Page(shuffledBlocks); - loadSimpleAndAssert(List.of(source)); + loadSimpleAndAssert(driverContext, List.of(source)); } - private void loadSimpleAndAssert(List input) { - DriverContext driverContext = driverContext(); + private void loadSimpleAndAssert(DriverContext driverContext, List input) { List results = new ArrayList<>(); List operators = List.of( factory( @@ -314,7 +328,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvKeywords.getBytesRef(offset + v, new BytesRef()).utf8ToString(), equalTo(PREFIX[v] + key)); } if (key % 3 > 0) { - assertThat(mvKeywords.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvKeywords.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(bools.getBoolean(i), equalTo(key % 2 == 0)); @@ -324,7 +338,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvBools.getBoolean(offset + v), equalTo(BOOLEANS[key % 3][v])); } if (key % 3 > 0) { - assertThat(mvBools.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvBools.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(mvInts.getValueCount(i), equalTo(key % 3 + 1)); @@ -333,7 +347,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvInts.getInt(offset + v), equalTo(1_000 * key + v)); } if (key % 3 > 0) { - assertThat(mvInts.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvInts.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(mvLongs.getValueCount(i), equalTo(key % 3 + 1)); @@ -342,7 +356,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvLongs.getLong(offset + v), equalTo(-1_000L * key + v)); } if (key % 3 > 0) { - 
assertThat(mvLongs.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvLongs.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(doubles.getDouble(i), equalTo(key / 123_456d)); @@ -351,7 +365,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvDoubles.getDouble(offset + v), equalTo(key / 123_456d + v)); } if (key % 3 > 0) { - assertThat(mvDoubles.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvDoubles.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java index 9eaa1e333f66e..784d5134e9608 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import java.util.List; @@ -28,9 +29,9 @@ public class AggregationOperatorTests extends ForkingOperatorTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index 8f995d9a31bc3..14d83ce252e5f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -95,7 +95,7 @@ protected final BigArrays nonBreakingBigArrays() { /** * A {@link DriverContext} with a nonBreakingBigArrays. */ - protected DriverContext driverContext() { + protected DriverContext driverContext() { // TODO make this final and return a breaking block factory return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance()); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index d5b07a713b8b4..57ea313b88dab 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -53,6 +53,25 @@ public static Page mergePages(List pages) { return new Page(blocks); } + /** + * Make a deep copy of some pages. Useful so that when the originals are + * released the copies are still live. 
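+ * A sketch of the intended use (names here are illustrative, not from this change):
+ * {@code List<Page> copies = deepCopyOf(pages);} then release the originals with
+ * {@code pages.forEach(Page::releaseBlocks);} and the copies stay readable for
+ * later assertions.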
+ */ + public static List<Page> deepCopyOf(List<Page> pages) { + List<Page> out = new ArrayList<>(pages.size()); + for (Page p : pages) { + Block[] blocks = new Block[p.getBlockCount()]; + for (int b = 0; b < blocks.length; b++) { + Block orig = p.getBlock(b); + Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount()); + builder.copyFrom(orig, 0, p.getPositionCount()); + blocks[b] = builder.build(); + } + out.add(new Page(blocks)); + } + return out; + } + private final Iterator<Page> page; public CannedSourceOperator(Iterator<Page> page) { @@ -77,5 +96,9 @@ public Page getOutput() { } @Override - public void close() {} + public void close() { + while (page.hasNext()) { + page.next().releaseBlocks(); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index 7825e035df0db..f06a2780b7446 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; @@ -24,7 +25,7 @@ public class ColumnExtractOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List<BytesRef> input = LongStream.range(0, end) .mapToObj(l -> new BytesRef("word1_" + l + " word2_" + l + " word3_" + l)) .collect(Collectors.toList()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index 156f37d8d8e7a..91e18214abee2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -21,8 +22,8 @@ public class EvalOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { - return new TupleBlockSourceOperator(LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { + return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } record Addition(int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index b26fe0c33fe1c..9c29471473203 100644 ---
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -23,8 +24,8 @@ public class FilterOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { - return new TupleBlockSourceOperator(LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { + return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } record SameLastDigit(int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 9d1084fcc4cf3..d01a5b17ac788 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -56,7 +56,7 @@ protected final Operator.OperatorFactory simple(BigArrays bigArrays) { public final void testInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); try ( @@ -80,7 +80,7 @@ public final void testInitialFinal() { public final void testManyInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); List results = new ArrayList<>(); try ( @@ -101,7 +101,7 @@ public final void testManyInitialFinal() { public final void testInitialIntermediateFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); try ( @@ -127,7 +127,7 @@ public final void testInitialIntermediateFinal() { public final void testManyInitialManyPartialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> 
List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); Collections.shuffle(partials, random()); @@ -156,7 +156,7 @@ public final void testManyInitialManyPartialFinal() { // to move the data through the pipeline. public final void testManyInitialManyPartialFinalRunner() { BigArrays bigArrays = nonBreakingBigArrays(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, false /* no throwing ops */); @@ -178,7 +178,7 @@ protected void start(Driver driver, ActionListener listener) { // runner behaves correctly and also releases all resources (bigArrays) appropriately. public final void testManyInitialManyPartialFinalRunnerThrowing() { BigArrays bigArrays = nonBreakingBigArrays(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, true /* one throwing op */); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index 954a1f179f259..1afa5d3c02330 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongGroupingAggregatorFunctionTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -31,9 +32,12 @@ public class HashAggregationOperatorTests extends ForkingOperatorTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(l % 5, randomLongBetween(-max, max)))); + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(l % 5, randomLongBetween(-max, max))) + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index 228fdf262cf62..bbbfd44014ffc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import 
java.util.List; @@ -24,8 +25,8 @@ protected Operator.OperatorFactory simple(BigArrays bigArrays) { } @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size)); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 80ac57ed539e7..21ca59e0f45a4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -24,7 +25,7 @@ public class MvExpandOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new AbstractBlockSourceOperator(8 * 1024) { private int idx; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 3b2fac5271aa6..aee600b079b81 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArray; @@ -21,6 +22,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -36,6 +38,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.in; /** * Base tests for {@link Operator}s that are not {@link SourceOperator} or {@link SinkOperator}. @@ -44,7 +47,7 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { /** * Valid input to be sent to {@link #simple}; */ - protected abstract SourceOperator simpleInput(int size); + protected abstract SourceOperator simpleInput(BlockFactory blockFactory, int size); /** * Assert that output from {@link #simple} is correct for the @@ -80,15 +83,27 @@ public final void testSimpleLargeInput() { * in a sane way. 
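* (Concretely: driving the operator with a breaker capped at smallEnoughToCircuitBreak()
* is expected to surface MockBigArrays.ERROR_MESSAGE rather than allocate past the
* limit; see the assertions below.)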
*/ public final void testSimpleCircuitBreaking() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()); + /* + * We build two CircuitBreakers - one for the input blocks and one for the operation itself. + * The input blocks don't count against the memory usage for the limited operator that we + * build. + */ + DriverContext inputFactoryContext = driverContext(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()) + .withCircuitBreaking(); + List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); Exception e = expectThrows( CircuitBreakingException.class, - () -> assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)) + () -> drive(simple(bigArrays).get(new DriverContext(bigArrays, blockFactory)), input.iterator()) ); assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + + // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. + assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } /** @@ -98,15 +113,24 @@ public final void testSimpleCircuitBreaking() { * in ctors. */ public final void testSimpleWithCranky() { - CrankyCircuitBreakerService breaker = new CrankyCircuitBreakerService(); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, breaker).withCircuitBreaking(); - BlockFactory blockFactory = BlockFactory.getInstance(breaker.getBreaker("request"), bigArrays); + DriverContext inputFactoryContext = driverContext(); + List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); + + CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); + BlockFactory blockFactory = BlockFactory.getInstance(cranky.getBreaker(CircuitBreaker.REQUEST), bigArrays); try { - assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)); + List result = drive(simple(bigArrays).get(new DriverContext(bigArrays, blockFactory)), input.iterator()); + Releasables.close(() -> Iterators.map(result.iterator(), p -> p::releaseBlocks)); // Either we get lucky and cranky doesn't throw and the test completes or we don't and it throws } catch (CircuitBreakingException e) { + logger.info("broken", e); assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } + + // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. 
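+ // (Two breakers are checked deliberately: the cranky one covers allocations the
+ // operator under test makes, while inputFactoryContext's breaker covers the input
+ // pages built above; a leak in either shows up as a nonzero getUsed() here.)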
+ assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } /** @@ -139,10 +163,12 @@ protected final List oneDriverPerPageList(Iterator> source, Sup } private void assertSimple(DriverContext context, int size) { - List input = CannedSourceOperator.collectPages(simpleInput(size)); + List input = CannedSourceOperator.collectPages(simpleInput(context.blockFactory(), size)); + // Clone the input so that the operator can close it, then, later, we can read it again to build the assertion. + List inputClone = CannedSourceOperator.deepCopyOf(input); BigArrays bigArrays = context.bigArrays().withCircuitBreaking(); List results = drive(simple(bigArrays).get(context), input.iterator()); - assertSimpleOutput(input, results); + assertSimpleOutput(inputClone, results); results.forEach(Page::releaseBlocks); assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); } @@ -180,7 +206,11 @@ public static void runDriver(List drivers) { "dummy-session", new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance()), () -> "dummy-driver", - new SequenceLongBlockSourceOperator(LongStream.range(0, between(1, 100)), between(1, 100)), + new SequenceLongBlockSourceOperator( + BlockFactory.getNonBreakingInstance(), + LongStream.range(0, between(1, 100)), + between(1, 100) + ), List.of(), new PageConsumerOperator(page -> {}), Driver.DEFAULT_STATUS_INTERVAL, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index baa7842bdc1f9..6ec0183bbf224 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -90,7 +90,7 @@ private List randomProjection(int size) { } @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java index 0aa78f3ad0ab3..f7c3ee825d695 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java @@ -21,23 +21,27 @@ public class SequenceLongBlockSourceOperator extends AbstractBlockSourceOperator static final int DEFAULT_MAX_PAGE_POSITIONS = 8 * 1024; + private final BlockFactory blockFactory; + private final long[] values; - public SequenceLongBlockSourceOperator(LongStream values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, LongStream values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public SequenceLongBlockSourceOperator(LongStream values, int maxPagePositions) { + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, LongStream values, int maxPagePositions) { super(maxPagePositions); + 
this.blockFactory = blockFactory; + this.values = values.toArray(); } - public SequenceLongBlockSourceOperator(List<Long> values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, List<Long> values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public SequenceLongBlockSourceOperator(List<Long> values, int maxPagePositions) { + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, List<Long> values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = values.stream().mapToLong(Long::longValue).toArray(); } @@ -48,7 +52,7 @@ protected Page createPage(int positionOffset, int length) { array[i] = values[positionOffset + i]; } currentPosition += length; - return new Page(BlockFactory.getNonBreakingInstance().newLongArrayVector(array, array.length).asBlock()); // TODO: just for compile + return new Page(blockFactory.newLongArrayVector(array, array.length).asBlock()); } protected int remaining() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index f3c67f18589fa..1e72ecb0c3ee4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; @@ -25,7 +26,7 @@ public class StringExtractOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List<BytesRef> input = LongStream.range(0, end) .mapToObj(l -> new BytesRef("word1_" + l + " word2_" + l + " word3_" + l)) .collect(Collectors.toList()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java index 78cff5897c917..9b87dbe01224a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java @@ -26,10 +26,6 @@ public class TupleBlockSourceOperator extends AbstractBlockSourceOperator { private final List<Tuple<Long, Long>> values; - public TupleBlockSourceOperator(Stream<Tuple<Long, Long>> values) { - this(BlockFactory.getNonBreakingInstance(), values, DEFAULT_MAX_PAGE_POSITIONS); - } - public TupleBlockSourceOperator(BlockFactory blockFactory, Stream<Tuple<Long, Long>> values) { this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } @@ -40,14 +36,14 @@ public TupleBlockSourceOperator(BlockFactory blockFactory, Stream<Tuple<Long, Long>> values) { - public TupleBlockSourceOperator(List<Tuple<Long, Long>> values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public TupleBlockSourceOperator(BlockFactory blockFactory, List<Tuple<Long, Long>> values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public TupleBlockSourceOperator(List<Tuple<Long, Long>> values, int maxPagePositions) { + public TupleBlockSourceOperator(BlockFactory blockFactory, List<Tuple<Long, Long>> values, int
maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = values; - blockFactory = BlockFactory.getNonBreakingInstance(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index d79fde19f5487..9c4358e5d9ee0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.DocVector; @@ -142,7 +143,13 @@ public void testNotInKey() { ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), false, value).writeValue(valuesBuilder, 0); assertThat(valuesBuilder.length(), greaterThan(0)); - ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), false, 1); + ResultBuilder result = ResultBuilder.resultBuilderFor( + BlockFactory.getNonBreakingInstance(), + testCase.type, + testCase.encoder.toUnsortable(), + false, + 1 + ); BytesRef values = valuesBuilder.bytesRefView(); result.decodeValue(values); assertThat(values.length, equalTo(0)); @@ -163,7 +170,13 @@ public void testInKey() { ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), true, value).writeValue(valuesBuilder, 0); assertThat(valuesBuilder.length(), greaterThan(0)); - ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), true, 1); + ResultBuilder result = ResultBuilder.resultBuilderFor( + BlockFactory.getNonBreakingInstance(), + testCase.type, + testCase.encoder.toUnsortable(), + true, + 1 + ); BytesRef keys = keysBuilder.bytesRefView(); if (testCase.type == ElementType.NULL) { assertThat(keys.length, equalTo(1)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 7491ffde6766e..c331b7ab013ae 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -37,16 +38,21 @@ import org.elasticsearch.compute.operator.TupleBlockSourceOperator; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xpack.versionfield.Version; +import org.junit.After; +import java.lang.reflect.Field; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import 
java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -143,8 +149,12 @@ protected String expectedToStringOfSimple() { } @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> ESTestCase.randomLong()), between(1, size * 2)); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator( + blockFactory, + LongStream.range(0, size).map(l -> ESTestCase.randomLong()), + between(1, size * 2) + ); } @Override @@ -180,26 +190,48 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testRamBytesUsed() { + RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() { + @Override + public long accumulateObject(Object o, long shallowSize, Map<Field, Object> fieldValues, Collection<Object> queue) { + if (o instanceof ElementType) { + return 0; // shared + } + if (o instanceof TopNEncoder) { + return 0; // shared + } + if (o instanceof CircuitBreaker) { + return 0; // shared + } + if (o instanceof BlockFactory) { + return 0; // shared + } + return super.accumulateObject(o, shallowSize, fieldValues, queue); + } + }; int topCount = 10_000; // We under-count by a few bytes because of the lists. In the end that's fine, but we need to account for it here. - long underCount = 100; - TopNOperator op = new TopNOperator.TopNOperatorFactory( - topCount, - List.of(LONG), - List.of(DEFAULT_UNSORTABLE), - List.of(new TopNOperator.SortOrder(0, true, false)), - pageSize - ).get(driverContext()); - long actualEmpty = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE) - - RamUsageTester.ramUsed(op.breaker()); - assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); - // But when we fill it then we're quite close - for (Page p : CannedSourceOperator.collectPages(simpleInput(topCount))) { - op.addInput(p); + long underCount = 200; + DriverContext context = driverContext(); + try ( + TopNOperator op = new TopNOperator.TopNOperatorFactory( + topCount, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, false)), + pageSize + ).get(context) + ) { + long actualEmpty = RamUsageTester.ramUsed(op, acc); + assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); + // But when we fill it then we're quite close + for (Page p : CannedSourceOperator.collectPages(simpleInput(context.blockFactory(), topCount))) { + op.addInput(p); + } + long actualFull = RamUsageTester.ramUsed(op, acc); + assertThat(op.ramBytesUsed(), both(greaterThan(actualFull - underCount)).and(lessThan(actualFull))); + + // TODO empty it again and check.
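+ // (A sketch of that follow-up, under the assumption that the output pages can be
+ // drained and released: after releasing them, ramBytesUsed() should fall back
+ // near actualEmpty. Not implemented here; the draining loop is an assumption.)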
} - long actualFull = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE) - - RamUsageTester.ramUsed(op.breaker()); - assertThat(op.ramBytesUsed(), both(greaterThan(actualFull - underCount)).and(lessThan(actualFull))); } public void testRandomTopN() { @@ -471,6 +503,7 @@ public void testCollectAllValues() { new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + blockFactory, nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -559,6 +592,7 @@ public void testCollectAllValues_RandomMultiValues() { new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + blockFactory, nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -590,9 +624,10 @@ private List> topNTwoColumns( try ( Driver driver = new Driver( driverContext, - new TupleBlockSourceOperator(inputValues, randomIntBetween(1, 1000)), + new TupleBlockSourceOperator(driverContext.blockFactory(), inputValues, randomIntBetween(1, 1000)), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), limit, elementTypes, @@ -607,6 +642,7 @@ private List> topNTwoColumns( for (int i = 0; i < block1.getPositionCount(); i++) { outputValues.add(tuple(block1.isNull(i) ? null : block1.getLong(i), block2.isNull(i) ? null : block2.getLong(i))); } + page.releaseBlocks(); }), () -> {} ) @@ -848,6 +884,7 @@ private void assertSortingOnMV( TopNEncoder encoder, TopNOperator.SortOrder... sortOrders ) { + DriverContext driverContext = driverContext(); Block block = TestBlockBuilder.blockFromValues(values, blockType); assert block.mvOrdering() == Block.MvOrdering.UNORDERED : "Blocks created for this test must have unordered multi-values"; Page page = new Page(block); @@ -856,10 +893,11 @@ private void assertSortingOnMV( int topCount = randomIntBetween(1, values.size()); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(page).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, List.of(blockType), @@ -878,6 +916,7 @@ private void assertSortingOnMV( } public void testRandomMultiValuesTopN() { + DriverContext driverContext = driverContext(); int rows = randomIntBetween(50, 100); int topCount = randomIntBetween(1, rows); int blocksCount = randomIntBetween(20, 30); @@ -969,8 +1008,9 @@ public void testRandomMultiValuesTopN() { } List>> actualValues = new ArrayList<>(); - List results = this.drive( + List results = drive( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -982,6 +1022,7 @@ public void testRandomMultiValuesTopN() { ); for (Page p : results) { readAsRows(actualValues, p); + p.releaseBlocks(); } List>> topNExpectedValues = expectedValues.stream() @@ -1003,13 +1044,15 @@ public void testIPSortingSingleValue() throws UnknownHostException { append(builder, new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip)))); } + DriverContext driverContext = driverContext(); List> actual = new ArrayList<>(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), 
nonBreakingBigArrays().breakerService().getBreaker("request"), ips.size(), List.of(BYTES_REF), @@ -1075,7 +1118,7 @@ public void testIPSortingUnorderedMultiValues() throws UnknownHostException { public void testIPSortingOrderedMultiValues() throws UnknownHostException { List> ips = new ArrayList<>(); - ips.add(List.of("123.4.245.23", "123.4.245.23")); + ips.add(List.of("123.4.245.23", "123.4.245.24")); ips.add(null); ips.add(List.of("104.30.244.2", "127.0.0.1")); ips.add(null); @@ -1092,17 +1135,17 @@ public void testIPSortingOrderedMultiValues() throws UnknownHostException { expectedDecodedIps.add(List.of("104.30.244.2", "127.0.0.1")); expectedDecodedIps.add(List.of("104.30.244.2", "124.255.255.255")); expectedDecodedIps.add(List.of("104.244.4.1")); - expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.23")); + expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.24")); } else { expectedDecodedIps.add(List.of("1.198.3.93", "2.3.4.5", "255.123.123.0")); expectedDecodedIps.add(List.of("104.30.244.2", "127.0.0.1")); expectedDecodedIps.add(List.of("104.30.244.2", "124.255.255.255")); - expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.23")); + expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.24")); expectedDecodedIps.add(List.of("104.244.4.1")); expectedDecodedIps.add(List.of("1.1.1.0", "32.183.93.40")); } - assertIPSortingOnMultiValues(ips, asc, Block.MvOrdering.ASCENDING, expectedDecodedIps); + assertIPSortingOnMultiValues(ips, asc, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, expectedDecodedIps); } private void assertIPSortingOnMultiValues( @@ -1128,12 +1171,14 @@ private void assertIPSortingOnMultiValues( } List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), ips.size(), List.of(BYTES_REF), @@ -1210,12 +1255,14 @@ public void testZeroByte() { blocks.add(builderInt.build()); List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), 2, List.of(BYTES_REF, INT), @@ -1243,10 +1290,55 @@ public void testZeroByte() { assertThat((Integer) actual.get(1).get(1), equalTo(100)); } + public void testErrorBeforeFullyDraining() { + int maxPageSize = between(1, 100); + int topCount = maxPageSize * 4; + int docCount = topCount * 10; + List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); + try ( + Driver driver = new Driver( + driverContext, + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.range(0, docCount)), + List.of( + new TopNOperator( + driverContext.blockFactory(), + nonBreakingBigArrays().breakerService().getBreaker("request"), + topCount, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, randomBoolean())), + maxPageSize + ) + ), + new PageConsumerOperator(p -> { + assertThat(p.getPositionCount(), equalTo(maxPageSize)); + if (actual.isEmpty()) { + readInto(actual, p); + } else { + p.releaseBlocks(); + throw new RuntimeException("boo"); + } + }), + () -> {} + ) + ) { + 
Exception e = expectThrows(RuntimeException.class, () -> runDriver(driver)); + assertThat(e.getMessage(), equalTo("boo")); + } + + ListMatcher values = matchesList(); + for (int i = 0; i < maxPageSize; i++) { + values = values.item((long) i); + } + assertMap(actual, matchesList().item(values)); + } + public void testCloseWithoutCompleting() { CircuitBreaker breaker = new MockBigArrays.LimitedBreaker(CircuitBreaker.REQUEST, ByteSizeValue.ofGb(1)); try ( TopNOperator op = new TopNOperator( + driverContext().blockFactory(), breaker, 2, List.of(INT), @@ -1257,7 +1349,23 @@ public void testCloseWithoutCompleting() { ) { op.addInput(new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock())); } - assertThat(breaker.getUsed(), equalTo(0L)); + } + + private final List breakers = new ArrayList<>(); + + @Override + protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + } + + @After + public void allBreakersEmpty() { + for (CircuitBreaker breaker : breakers) { + assertThat(breaker.getUsed(), equalTo(0L)); + } } @SuppressWarnings({ "unchecked", "rawtypes" }) diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml index d72d09644a128..2e8c43379d690 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml @@ -60,6 +60,39 @@ setup: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 1434595272, "rx": 530605511}}}}' + - do: + indices.create: + index: test2 + body: + settings: + index: + mode: time_series + routing_path: [ dim ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + dim: + type: keyword + time_series_dimension: true + agg_metric: + type: aggregate_metric_double + metrics: + - max + default_metric: max + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": "A", "agg_metric": {"max": 10}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "dim": "B", "agg_metric": {"max": 20}}' + --- load everything: - do: @@ -109,3 +142,26 @@ filter on counter: esql.query: body: query: 'from test | where k8s.pod.network.tx == 1434577921' + +--- +from doc with aggregate_metric_double: + - do: + esql.query: + body: + query: 'from test2' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "agg_metric"} + - match: {columns.1.type: "unsupported"} + - match: {columns.2.name: "dim"} + - match: {columns.2.type: "keyword"} + - length: {values: 0} + +--- +stats on aggregate_metric_double: + - do: + catch: /Cannot use field \[agg_metric\] with unsupported type \[aggregate_metric_double\]/ + esql.query: + body: + query: 'FROM test2 | STATS max(agg_metric) BY dim ' 
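(A hedged Java sketch of what the two YAML cases above pin down, written against the run/getValuesList/ColumnInfo helpers that appear in EsqlActionIT later in this diff; the literal strings come from the YAML expectations, everything else is illustrative:)

    // aggregate_metric_double comes back as an "unsupported" column carrying no values...
    try (EsqlQueryResponse results = run("from test2")) {
        ColumnInfo agg = results.columns().get(1);
        assertEquals("agg_metric", agg.name());
        assertEquals("unsupported", agg.type());
        assertEquals(0, getValuesList(results).size());
    }
    // ...and aggregating over it is rejected with the documented error.
    Exception err = expectThrows(Exception.class, () -> run("FROM test2 | STATS max(agg_metric) BY dim"));
    assertThat(err.getMessage(), containsString("Cannot use field [agg_metric] with unsupported type [aggregate_metric_double]"));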
diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml index 280a32aa10cd3..bae0e623d12a3 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml @@ -235,35 +235,36 @@ disjoint_mappings: - length: { values: 1 } - match: { values.0.0: 2 } - - do: - esql.query: - body: - query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' - - match: { columns.0.name: message1 } - - match: { columns.0.type: keyword } - - match: { columns.1.name: message2 } - - match: { columns.1.type: long } - - match: { columns.2.name: x } - - match: { columns.2.type: keyword } - - match: { columns.3.name: y } - - match: { columns.3.type: long } - - length: { values: 4 } - - match: { values.0.0: foo1 } - - match: { values.0.1: null } - - match: { values.0.2: foo1 } - - match: { values.0.3: null } - - match: { values.1.0: foo2 } - - match: { values.1.1: null } - - match: { values.1.2: foo2 } - - match: { values.1.3: null } - - match: { values.2.0: null } - - match: { values.2.1: 1 } - - match: { values.2.2: null } - - match: { values.2.3: 2 } - - match: { values.3.0: null } - - match: { values.3.1: 2 } - - match: { values.3.2: null } - - match: { values.3.3: 3 } +# AwaitsFix https://github.com/elastic/elasticsearch/issues/99826 +# - do: +# esql.query: +# body: +# query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' +# - match: { columns.0.name: message1 } +# - match: { columns.0.type: keyword } +# - match: { columns.1.name: message2 } +# - match: { columns.1.type: long } +# - match: { columns.2.name: x } +# - match: { columns.2.type: keyword } +# - match: { columns.3.name: y } +# - match: { columns.3.type: long } +# - length: { values: 4 } +# - match: { values.0.0: foo1 } +# - match: { values.0.1: null } +# - match: { values.0.2: foo1 } +# - match: { values.0.3: null } +# - match: { values.1.0: foo2 } +# - match: { values.1.1: null } +# - match: { values.1.2: foo2 } +# - match: { values.1.3: null } +# - match: { values.2.0: null } +# - match: { values.2.1: 1 } +# - match: { values.2.2: null } +# - match: { values.2.3: 2 } +# - match: { values.3.0: null } +# - match: { values.3.1: 2 } +# - match: { values.3.2: null } +# - match: { values.3.3: 3 } --- same_name_different_type: diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec index d89f3337c081b..82fd27416c526 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec @@ -7,7 +7,8 @@ emp_no:integer |_index:keyword |_version:long 10002 |employees |1 ; -aliasWithSameName +# AwaitsFix https://github.com/elastic/elasticsearch/issues/99826 +aliasWithSameName-Ignore from employees [metadata _index, _version] | sort emp_no | limit 2 | eval _index = _index, _version = _version | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long diff --git 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index f8aeee1569f2e..800e36949f5ea 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.Build; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -109,32 +108,33 @@ public void testFromStatsGroupingAvgWithAliases() { } private void testFromStatsGroupingAvgImpl(String command, String expectedGroupName, String expectedFieldName) { - EsqlQueryResponse results = run(command); - logger.info(results); - Assert.assertEquals(2, results.columns().size()); - - // assert column metadata - ColumnInfo valuesColumn = results.columns().get(0); - assertEquals(expectedFieldName, valuesColumn.name()); - assertEquals("double", valuesColumn.type()); - ColumnInfo groupColumn = results.columns().get(1); - assertEquals(expectedGroupName, groupColumn.name()); - assertEquals("long", groupColumn.type()); + try (EsqlQueryResponse results = run(command)) { + logger.info(results); + Assert.assertEquals(2, results.columns().size()); - // assert column values - List> valueValues = getValuesList(results); - assertEquals(2, valueValues.size()); - // This is loathsome, find a declarative way to assert the expected output. - if ((long) valueValues.get(0).get(1) == 1L) { - assertEquals(42.0, (double) valueValues.get(0).get(0), 0.0); - assertEquals(2L, (long) valueValues.get(1).get(1)); - assertEquals(44.0, (double) valueValues.get(1).get(0), 0.0); - } else if ((long) valueValues.get(0).get(1) == 2L) { - assertEquals(42.0, (double) valueValues.get(1).get(0), 0.0); - assertEquals(1L, (long) valueValues.get(1).get(1)); - assertEquals(44.0, (double) valueValues.get(0).get(0), 0.0); - } else { - fail("Unexpected group value: " + valueValues.get(0).get(0)); + // assert column metadata + ColumnInfo valuesColumn = results.columns().get(0); + assertEquals(expectedFieldName, valuesColumn.name()); + assertEquals("double", valuesColumn.type()); + ColumnInfo groupColumn = results.columns().get(1); + assertEquals(expectedGroupName, groupColumn.name()); + assertEquals("long", groupColumn.type()); + + // assert column values + List> valueValues = getValuesList(results); + assertEquals(2, valueValues.size()); + // This is loathsome, find a declarative way to assert the expected output. 
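+ // (One declarative possibility, sketched assuming rows are [avg, group]:
+ //   valueValues.sort(Comparator.comparingLong(r -> (Long) r.get(1)));
+ //   assertThat(valueValues, equalTo(List.of(List.of(42.0, 1L), List.of(44.0, 2L))));
+ // purely illustrative; the expected rows are read off the branches below.)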
+ if ((long) valueValues.get(0).get(1) == 1L) { + assertEquals(42.0, (double) valueValues.get(0).get(0), 0.0); + assertEquals(2L, (long) valueValues.get(1).get(1)); + assertEquals(44.0, (double) valueValues.get(1).get(0), 0.0); + } else if ((long) valueValues.get(0).get(1) == 2L) { + assertEquals(42.0, (double) valueValues.get(1).get(0), 0.0); + assertEquals(1L, (long) valueValues.get(1).get(1)); + assertEquals(44.0, (double) valueValues.get(0).get(0), 0.0); + } else { + fail("Unexpected group value: " + valueValues.get(0).get(0)); + } } } @@ -211,19 +211,20 @@ public void testFromGroupingByNumericFieldWithNulls() { } } client().admin().indices().prepareRefresh("test").get(); - EsqlQueryResponse results = run("from test | stats avg(count) by data | sort data"); - logger.info(results); + try (EsqlQueryResponse results = run("from test | stats avg(count) by data | sort data")) { + logger.info(results); - assertThat(results.columns(), hasSize(2)); - assertEquals("avg(count)", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); - assertEquals("data", results.columns().get(1).name()); - assertEquals("long", results.columns().get(1).type()); + assertThat(results.columns(), hasSize(2)); + assertEquals("avg(count)", results.columns().get(0).name()); + assertEquals("double", results.columns().get(0).type()); + assertEquals("data", results.columns().get(1).name()); + assertEquals("long", results.columns().get(1).type()); - record Group(Long data, Double avg) {} - List<Group> expectedGroups = List.of(new Group(1L, 42.0), new Group(2L, 44.0), new Group(99L, null), new Group(null, 12.0)); - List<Group> actualGroups = getValuesList(results).stream().map(l -> new Group((Long) l.get(1), (Double) l.get(0))).toList(); - assertThat(actualGroups, equalTo(expectedGroups)); + record Group(Long data, Double avg) {} + List<Group> expectedGroups = List.of(new Group(1L, 42.0), new Group(2L, 44.0), new Group(99L, null), new Group(null, 12.0)); + List<Group> actualGroups = getValuesList(results).stream().map(l -> new Group((Long) l.get(1), (Double) l.get(0))).toList(); + assertThat(actualGroups, equalTo(expectedGroups)); + } } public void testFromStatsGroupingByKeyword() { @@ -332,18 +333,19 @@ record Group(double avg, long mi, long ma, long s, long c, String color) {} } public void testFromSortWithTieBreakerLimit() { - EsqlQueryResponse results = run("from test | sort data, count desc, time | limit 5 | keep data, count, time"); - logger.info(results); - assertThat( - getValuesList(results), - contains( - List.of(1L, 44L, epoch + 2), - List.of(1L, 44L, epoch + 6), - List.of(1L, 44L, epoch + 10), - List.of(1L, 44L, epoch + 14), - List.of(1L, 44L, epoch + 18) - ) - ); + try (EsqlQueryResponse results = run("from test | sort data, count desc, time | limit 5 | keep data, count, time")) { + logger.info(results); + assertThat( + getValuesList(results), + contains( + List.of(1L, 44L, epoch + 2), + List.of(1L, 44L, epoch + 6), + List.of(1L, 44L, epoch + 10), + List.of(1L, 44L, epoch + 14), + List.of(1L, 44L, epoch + 18) + ) + ); + } } public void testFromStatsProjectGroup() { @@ -778,10 +780,11 @@ public void testFromStatsLimit() { } public void testFromLimit() { - EsqlQueryResponse results = run("from test | keep data | limit 2"); - logger.info(results); - assertThat(results.columns(), contains(new ColumnInfo("data", "long"))); - assertThat(getValuesList(results), contains(anyOf(contains(1L), contains(2L)), anyOf(contains(1L), contains(2L)))); + try (EsqlQueryResponse results = run("from test | keep data | 
limit 2")) { + logger.info(results); + assertThat(results.columns(), contains(new ColumnInfo("data", "long"))); + assertThat(getValuesList(results), contains(anyOf(contains(1L), contains(2L)), anyOf(contains(1L), contains(2L)))); + } } public void testDropAllColumns() { @@ -1000,27 +1003,25 @@ public void testTopNPushedToLuceneOnSortedIndex() { ); int limit = randomIntBetween(1, 5); - EsqlQueryResponse results = run("from sorted_test_index | sort time " + sortOrder + " | limit " + limit + " | keep time"); - logger.info(results); - Assert.assertEquals(1, results.columns().size()); - Assert.assertEquals(limit, getValuesList(results).size()); - - // assert column metadata - assertEquals("time", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + try (EsqlQueryResponse results = run("from sorted_test_index | sort time " + sortOrder + " | limit " + limit + " | keep time")) { + logger.info(results); + Assert.assertEquals(1, results.columns().size()); + Assert.assertEquals(limit, getValuesList(results).size()); - boolean sortedDesc = "desc".equals(sortOrder); - var expected = LongStream.range(0, 40) - .map(i -> epoch + i) - .boxed() - .sorted(sortedDesc ? reverseOrder() : naturalOrder()) - .limit(limit) - .toList(); - var actual = getValuesList(results).stream().map(l -> (Long) l.get(0)).toList(); - assertThat(actual, equalTo(expected)); + // assert column metadata + assertEquals("time", results.columns().get(0).name()); + assertEquals("long", results.columns().get(0).type()); - // clean-up - client().admin().indices().delete(new DeleteIndexRequest("sorted_test_index")).actionGet(); + boolean sortedDesc = "desc".equals(sortOrder); + var expected = LongStream.range(0, 40) + .map(i -> epoch + i) + .boxed() + .sorted(sortedDesc ? 
reverseOrder() : naturalOrder()) + .limit(limit) + .toList(); + var actual = getValuesList(results).stream().map(l -> (Long) l.get(0)).toList(); + assertThat(actual, equalTo(expected)); + } } /* diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java index 19893bf8072f1..2636a7cf3c133 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.FollowersChecker; @@ -18,6 +19,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.TransportSettings; @@ -29,7 +31,9 @@ import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +@TestLogging(value = "org.elasticsearch.indices.breaker:TRACE", reason = "failing") @ESIntegTestCase.ClusterScope(scope = TEST, minNumDataNodes = 2, maxNumDataNodes = 4) +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99173") public class EsqlDisruptionIT extends EsqlActionIT { // copied from AbstractDisruptionTestCase diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java index dba115f4f7c29..0b8b444a2b6a3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToBooleanFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java index d20179fd7baed..1295956645a6f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToBooleanFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java index 7ab2d656a59cb..be01f122f9a8f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToBooleanFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java index d70d0365aaf4d..7b83995bf0933 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import 
org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToBooleanFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendBoolean(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static boolean evalValue(BytesRefVector container, int index, BytesRef s public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java index d2cf4b41770ce..4a8aebe9cd8ab 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToBooleanFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java index 98310bb390392..ca237c1dcc4a7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToDatetimeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDatetimeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDatetimeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendLong(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static long evalValue(BytesRefVector container, int index, BytesRef scra public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java index a168d93e73ba3..27509a4a18e56 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. Do not edit it. */ public final class ToDegreesEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDegreesEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDegreesEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(DoubleVector container, int index) { @@ -71,7 +65,7 @@ private static double evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java index d2b16e4b722cb..a6ab12763ddc2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import 
org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java index 53e8edac3c5b3..5889cf151f0fa 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToDoubleFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java index 9be5f1f2456b1..ff1c81f3f544f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ 
-23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java index 653034f0c3bc9..197e5e5f2db36 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendDouble(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static double evalValue(BytesRefVector container, int index, BytesRef sc public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java index 54cc374c758fb..018517ae61d36 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import 
org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java index 76d5c58961970..b62fa771e492c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToIPFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIPFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIPFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java index 49f79cd0bcd3e..9529769a02200 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import 
org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java index e1b0db72ad7d9..7af8bdbf083ef 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantIntVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import 
org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java index 9a1394b9c02cf..a84367ab27a30 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToIntegerFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java index 180e64f97e63b..bd7085764e341 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendInt(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static int evalValue(BytesRefVector container, int index, BytesRef scrat public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java index 698db22c0ecc6..2312f94fec83e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java index bf76fb0eb8a59..48e3e45d42f46 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; 
import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java index 116be245e3191..14ec5e41a04e5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import 
org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java index 02d043c641cb0..f0eae8bfccc44 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java index cc825664cc331..8af9a14fd81be 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendLong(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static long evalValue(BytesRefVector container, int index, BytesRef scra public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java index 02bef2f9f9c2d..569df205855d3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(LongVector container, int index) { @@ -71,7 +65,7 @@ private static long evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java index 33ae94093dd85..6aa373e69b7cd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. Do not edit it. */ public final class ToRadiansEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToRadiansEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToRadiansEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(DoubleVector container, int index) { @@ -71,7 +65,7 @@ private static double evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java index 876344b1c35bc..8507395c6153a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import 
org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BooleanVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java index 8aa5148b21de4..7d6bf029fe80b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToStringFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(LongVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java index 8c7994a3c0a68..e0aa134286723 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import 
org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(DoubleVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java index 4e0249939cc91..7ef6c3df27025 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToStringFromIPEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromIPEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromIPEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java index d076b38f49b91..abe206d5a5152 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import 
org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(IntVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java index 90448cb992cd2..be6c2648f9eb4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(LongVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java index 91e31c9626b5e..9ba24301875d2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; 
import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(LongVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java index 281b881bd6141..69d2e0e106fa0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToStringFromVersionEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromVersionEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromVersionEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java index ec8b16568c380..541e5b8c7af11 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; 
import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java index 2ada365ce848e..89c896ccf1f43 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import 
org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java index 9acad2f9481a6..3c78c24ea7b01 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToUnsignedLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java index 0cb7da2ed230f..0c0cb9ebfb525 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. 
Do not edit it. */ public final class ToUnsignedLongFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(LongVector container, int index) { @@ -71,7 +65,7 @@ private static long evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java index 3297fcffbe73b..38056be01487c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendLong(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static long evalValue(BytesRefVector container, int index, BytesRef scra public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java index 5f6b62e16de52..bead25f13dd6a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import 
org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToVersionFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToVersionFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToVersionFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java index 3b15fe9f17293..67ad08a101dab 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java @@ -36,7 +36,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } BooleanBlock v = (BooleanBlock) fieldVal; @@ -66,7 +66,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } BooleanBlock v = (BooleanBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java index 6401664c9aa0d..36eaecf36e345 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java @@ -37,7 +37,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } BytesRefBlock v = (BytesRefBlock) fieldVal; @@ -69,7 +69,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } BytesRefBlock v = (BytesRefBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java index 0ec72b82e2438..072dc413168ec 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java @@ -35,7 +35,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } DoubleBlock v = (DoubleBlock) fieldVal; @@ -65,7 +65,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } DoubleBlock v = (DoubleBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java index 2bf14b26c6c5e..4d6a68ed67f26 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java @@ -35,7 +35,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } IntBlock v = (IntBlock) fieldVal; @@ -65,7 +65,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } IntBlock v = (IntBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java index ce5a95bee7699..fd0ee6bf57740 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java @@ -35,7 +35,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; @@ -65,7 +65,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java index 711992a20763e..a84a4059b1dc0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java @@ -35,7 +35,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } IntBlock v = (IntBlock) fieldVal; @@ -65,7 +65,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } IntBlock v = (IntBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java index 67d3c123a6953..4c5798bed2e35 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java @@ -36,7 +36,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; @@ -66,7 +66,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java index 93538708039b5..1731d0733b511 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java @@ -36,7 +36,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; @@ -66,7 +66,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return 
evalAscendingNotNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java index 6e16c8db4b896..afb5e1cb7cda1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java @@ -36,7 +36,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } BooleanBlock v = (BooleanBlock) fieldVal; @@ -66,7 +66,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } BooleanBlock v = (BooleanBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java index 99a671cf0a2df..41b487553dc8e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java @@ -37,7 +37,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } BytesRefBlock v = (BytesRefBlock) fieldVal; @@ -69,7 +69,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } BytesRefBlock v = (BytesRefBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java index e40ff78d0d364..63da4bd86c673 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java @@ -35,7 +35,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } DoubleBlock v = (DoubleBlock) fieldVal; @@ -65,7 +65,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == 
Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } DoubleBlock v = (DoubleBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java index 9412930da53c5..46dedaed43a3d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java @@ -35,7 +35,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } IntBlock v = (IntBlock) fieldVal; @@ -65,7 +65,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } IntBlock v = (IntBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java index 1fac131f0de0c..8e17c8f08a906 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java @@ -35,7 +35,7 @@ public String name() { */ @Override public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; @@ -65,7 +65,7 @@ public Block evalNullable(Block fieldVal) { */ @Override public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { + if (fieldVal.mvSortedAscending()) { return evalAscendingNotNullable(fieldVal); } LongBlock v = (LongBlock) fieldVal; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 1af66cb4f50b0..de24b049ea575 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -9,9 +9,11 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasables; @@ -25,7 +27,6 @@ import java.util.Locale; import java.util.Map; -import java.util.function.BiFunction; import java.util.function.Function; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -48,7 +49,7 @@ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fiel if (evaluator == null) { throw EsqlIllegalArgumentException.illegalDataType(sourceType); } - return dvrCtx -> evaluator.apply(fieldEval.get(dvrCtx), source()); + return dvrCtx -> evaluator.apply(fieldEval.get(dvrCtx), source(), dvrCtx); } @Override @@ -65,7 +66,7 @@ protected final TypeResolution resolveType() { ); } - protected abstract Map> evaluators(); + protected abstract Map> evaluators(); @Override public final Object fold() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 3ec6492ef0d8c..701b3fa67732c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -18,7 +20,6 @@ import java.math.BigInteger; import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -30,10 +31,11 @@ public class ToBoolean extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( BOOLEAN, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, KEYWORD, ToBooleanFromStringEvaluator::new, DOUBLE, @@ -51,7 +53,9 @@ public ToBoolean(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index 5049a80d075f9..eb23e460b88ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.ql.expression.Expression; @@ -18,7 +20,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -29,12 +30,13 @@ public class ToDatetime extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DATETIME, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, LONG, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, KEYWORD, ToDatetimeFromStringEvaluator::new, DOUBLE, @@ -50,7 +52,9 @@ public ToDatetime(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index ec59446989bca..299e8cfe8643e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -29,16 +30,29 @@ * to degrees. 
*/ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DOUBLE, ToDegreesEvaluator::new, INTEGER, - (field, source) -> new ToDegreesEvaluator(new ToDoubleFromIntEvaluator(field, source), source), + (field, source, driverContext) -> new ToDegreesEvaluator( + new ToDoubleFromIntEvaluator(field, source, driverContext), + source, + driverContext + ), LONG, - (field, source) -> new ToDegreesEvaluator(new ToDoubleFromLongEvaluator(field, source), source), + (field, source, driverContext) -> new ToDegreesEvaluator( + new ToDoubleFromLongEvaluator(field, source, driverContext), + source, + driverContext + ), UNSIGNED_LONG, - (field, source) -> new ToDegreesEvaluator(new ToDoubleFromUnsignedLongEvaluator(field, source), source) + (field, source, driverContext) -> new ToDegreesEvaluator( + new ToDoubleFromUnsignedLongEvaluator(field, source, driverContext), + source, + driverContext + ) ); public ToDegrees(Source source, Expression field) { @@ -46,7 +60,9 @@ public ToDegrees(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index dc8527637c7a3..690f7a66cbece 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -30,10 +31,11 @@ public class ToDouble extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DOUBLE, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToDoubleFromBooleanEvaluator::new, DATETIME, @@ -53,7 +55,9 @@ public ToDouble(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index 0931033758dbb..d55b9d23975e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -8,7 +8,9 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; @@ -25,15 +26,23 @@ public class ToIP extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of(IP, (fieldEval, source) -> fieldEval, KEYWORD, ToIPFromStringEvaluator::new); + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( + IP, + (fieldEval, source, driverContext) -> fieldEval, + KEYWORD, + ToIPFromStringEvaluator::new + ); public ToIP(Source source, Expression field) { super(source, field); } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 1d26c4724a423..0fcf62ed3864a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; @@ -31,10 +32,11 @@ public class ToInteger extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( INTEGER, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToIntegerFromBooleanEvaluator::new, DATETIME, @@ -54,7 +56,9 @@ public ToInteger(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index ffb31a77cb1fc..8e50dd8540ffd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; 
import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; @@ -32,12 +33,13 @@ public class ToLong extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( LONG, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, DATETIME, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToLongFromBooleanEvaluator::new, KEYWORD, @@ -55,7 +57,9 @@ public ToLong(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index 8064303e204d5..8bb5180e09752 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -29,16 +30,29 @@ * to radians. 
*/ public class ToRadians extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DOUBLE, ToRadiansEvaluator::new, INTEGER, - (field, source) -> new ToRadiansEvaluator(new ToDoubleFromIntEvaluator(field, source), source), + (field, source, driverContext) -> new ToRadiansEvaluator( + new ToDoubleFromIntEvaluator(field, source, driverContext), + source, + driverContext + ), LONG, - (field, source) -> new ToRadiansEvaluator(new ToDoubleFromLongEvaluator(field, source), source), + (field, source, driverContext) -> new ToRadiansEvaluator( + new ToDoubleFromLongEvaluator(field, source, driverContext), + source, + driverContext + ), UNSIGNED_LONG, - (field, source) -> new ToRadiansEvaluator(new ToDoubleFromUnsignedLongEvaluator(field, source), source) + (field, source, driverContext) -> new ToRadiansEvaluator( + new ToDoubleFromUnsignedLongEvaluator(field, source, driverContext), + source, + driverContext + ) ); public ToRadians(Source source, Expression field) { @@ -46,7 +60,9 @@ public ToRadians(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 428c1f32b1fc7..af895ab7c56cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -21,7 +23,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -38,10 +39,11 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( KEYWORD, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToStringFromBooleanEvaluator::new, DATETIME, @@ -55,7 +57,7 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper INTEGER, ToStringFromIntEvaluator::new, TEXT, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, VERSION, ToStringFromVersionEvaluator::new, UNSIGNED_LONG, @@ -67,7 +69,9 @@ public ToString(Source source, @Named("v") Expression v) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 83deed6b18490..396aa03f39dc6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; @@ -33,10 +34,11 @@ public class ToUnsignedLong extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( UNSIGNED_LONG, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, DATETIME, ToUnsignedLongFromLongEvaluator::new, BOOLEAN, @@ -56,7 +58,9 @@ public ToUnsignedLong(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index 0051bee45eead..559e2fc4f89fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -19,7 +21,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; @@ -27,9 +28,10 @@ public class ToVersion extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.ofEntries( - Map.entry(VERSION, (fieldEval, source) -> fieldEval), + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.ofEntries( + Map.entry(VERSION, (fieldEval, source, driverContext) -> fieldEval), Map.entry(KEYWORD, ToVersionFromStringEvaluator::new), Map.entry(TEXT, ToVersionFromStringEvaluator::new) ); @@ -39,7 +41,9 @@ public ToVersion(Source source, @Named("v") Expression v) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git 
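Across these converter classes the per-type evaluator registries change from a two-argument factory over (field evaluator, source) to a TriFunction that also threads a DriverContext into every generated evaluator. The angle-bracket type parameters were stripped from this patch text during extraction; reconstructed from the imports and the lambda shapes, the intended declaration is assumed to look roughly like the sketch below (modeled on ToDouble, so the exact generics are an inference, not a quote from the patch):

```java
// Reconstructed shape (assumed): both the first parameter and the result are
// EvalOperator.ExpressionEvaluator, with Source and DriverContext threaded through.
private static final Map<
    DataType,
    TriFunction<EvalOperator.ExpressionEvaluator, Source, DriverContext, EvalOperator.ExpressionEvaluator>> EVALUATORS =
        Map.of(
            DOUBLE,
            (fieldEval, source, driverContext) -> fieldEval, // identity: the value is already a double
            INTEGER,
            ToDoubleFromIntEvaluator::new                    // generated evaluator now also accepts the DriverContext
        );
```

The identity lambdas confirm the value type: where no conversion is needed, the incoming evaluator is returned unchanged and the extra DriverContext parameter is simply ignored.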
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index 714112b2db543..cef65b6c477c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -26,6 +26,8 @@ import java.math.BigInteger; import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.function.BiFunction; import java.util.stream.DoubleStream; @@ -385,7 +387,17 @@ private static > void putInOrder(List mvData, Block.M switch (ordering) { case UNORDERED -> { } - case ASCENDING -> Collections.sort(mvData); + case DEDUPLICATED_UNORDERD -> { + var dedup = new LinkedHashSet<>(mvData); + mvData.clear(); + mvData.addAll(dedup); + } + case DEDUPLICATED_AND_SORTED_ASCENDING -> { + var dedup = new HashSet<>(mvData); + mvData.clear(); + mvData.addAll(dedup); + Collections.sort(mvData); + } default -> throw new UnsupportedOperationException("unsupported ordering [" + ordering + "]"); } } diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java index 4086a1a729c14..de355cd675089 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; @@ -56,10 +57,13 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; +import java.util.function.Function; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -798,6 +802,129 @@ public void testUpdateIndexTemplateToMigrateFromDataStreamLifecycleToIlm() throw }); } + public void testGetDataStreamResponse() throws Exception { + // ILM rolls over every 2 documents + RolloverAction rolloverIlmAction = new RolloverAction(RolloverConditions.newBuilder().addMaxIndexDocsCondition(2L).build()); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of(rolloverIlmAction.getWriteableName(), rolloverIlmAction)); + LifecyclePolicy lifecyclePolicy = new 
LifecyclePolicy(policy, Map.of("hot", hotPhase)); + PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); + assertAcked(client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get()); + + putComposableIndexTemplate( + indexTemplateName, + null, + List.of(dataStreamName + "*"), + Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policy).build(), + null, + null + ); + CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName); + client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).get(); + + indexDocs(dataStreamName, 2); + + // wait to rollover + assertBusy(() -> { + GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getIndices().size(), is(2)); + }); + + // prefer_ilm false in the index template + putComposableIndexTemplate( + indexTemplateName, + null, + List.of(dataStreamName + "*"), + Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policy).put(IndexSettings.PREFER_ILM, false).build(), + null, + null + ); + + client().execute( + PutDataStreamLifecycleAction.INSTANCE, + new PutDataStreamLifecycleAction.Request(new String[] { dataStreamName }, TimeValue.timeValueDays(90)) + ).actionGet(); + + // rollover again - at this point this data stream should have 2 backing indices managed by ILM and the write index managed by + // data stream lifecycle + indexDocs(dataStreamName, 2); + + assertBusy(() -> { + GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + GetDataStreamAction.Response.DataStreamInfo dataStreamInfo = getDataStreamResponse.getDataStreams().get(0); + List indices = dataStreamInfo.getDataStream().getIndices(); + assertThat(indices.size(), is(3)); + + // the prefer_ilm value from the template should be reflected in the response at the top level + assertThat(dataStreamInfo.templatePreferIlmValue(), is(false)); + // the template ILM policy should still be reflected at the top level + assertThat(dataStreamInfo.getIlmPolicy(), is(policy)); + + List backingIndices = getBackingIndices(dataStreamName); + String firstGenerationIndex = backingIndices.get(0); + String secondGenerationIndex = backingIndices.get(1); + String writeIndex = backingIndices.get(2); + assertThat( + indices.stream().map(i -> i.getName()).toList(), + containsInAnyOrder(firstGenerationIndex, secondGenerationIndex, writeIndex) + ); + + Function> backingIndexSupplier = indexName -> indices.stream() + .filter(index -> index.getName().equals(indexName)) + .findFirst(); + + // let's assert the policy is reported for all indices (as it's present in the index template) and the value of the + // prefer_ilm setting remains true for the first 2 generations and is false for the write index (the generation after rollover) + Optional firstGenSettings = backingIndexSupplier.apply(firstGenerationIndex); + assertThat(firstGenSettings.isPresent(), is(true)); + 
assertThat(dataStreamInfo.getIndexSettingsValues().get(firstGenSettings.get()).preferIlm(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(firstGenSettings.get()).ilmPolicyName(), is(policy)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(firstGenSettings.get()).managedBy(), is(ManagedBy.ILM)); + Optional secondGenSettings = backingIndexSupplier.apply(secondGenerationIndex); + assertThat(secondGenSettings.isPresent(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(secondGenSettings.get()).preferIlm(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(secondGenSettings.get()).ilmPolicyName(), is(policy)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(secondGenSettings.get()).managedBy(), is(ManagedBy.ILM)); + Optional writeIndexSettings = backingIndexSupplier.apply(writeIndex); + assertThat(writeIndexSettings.isPresent(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(writeIndexSettings.get()).preferIlm(), is(false)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(writeIndexSettings.get()).ilmPolicyName(), is(policy)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(writeIndexSettings.get()).managedBy(), is(ManagedBy.LIFECYCLE)); + + // with the current configuration, the next generation index will be managed by DSL + assertThat(dataStreamInfo.getNextGenerationManagedBy(), is(ManagedBy.LIFECYCLE)); + }); + + // remove ILM policy and prefer_ilm from template + putComposableIndexTemplate(indexTemplateName, null, List.of(dataStreamName + "*"), Settings.builder().build(), null, null); + GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + GetDataStreamAction.Response.DataStreamInfo dataStreamInfo = getDataStreamResponse.getDataStreams().get(0); + // since the ILM related settings are gone from the index template, this data stream should now be managed by lifecycle + assertThat(dataStreamInfo.getNextGenerationManagedBy(), is(ManagedBy.LIFECYCLE)); + + // disable data stream lifecycle on the data stream.
the future generations will be UNMANAGED + client().execute( + PutDataStreamLifecycleAction.INSTANCE, + new PutDataStreamLifecycleAction.Request(new String[] { dataStreamName }, TimeValue.timeValueDays(90), false) + ).actionGet(); + + getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest).actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + dataStreamInfo = getDataStreamResponse.getDataStreams().get(0); + // since the ILM related settings are gone from the index template and the lifecycle is disabled, this data stream should now be + // unmanaged + assertThat(dataStreamInfo.getNextGenerationManagedBy(), is(ManagedBy.UNMANAGED)); + } + static void indexDocs(String dataStream, int numDocs) { BulkRequest bulkRequest = new BulkRequest(); for (int i = 0; i < numDocs; i++) { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java index 7ae7d4b0497e0..dbf489e8abf23 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java @@ -103,8 +103,16 @@ public class TextExpansionQueryIT extends PyTorchModelRestTestCase { RAW_MODEL_SIZE = Base64.getDecoder().decode(BASE_64_ENCODED_MODEL).length; } + public void testRankFeaturesTextExpansionQuery() throws IOException { + testTextExpansionQuery("rank_features"); + } + + public void testSparseVectorTextExpansionQuery() throws IOException { + testTextExpansionQuery("sparse_vector"); + } + @SuppressWarnings("unchecked") - public void testTextExpansionQuery() throws IOException { + private void testTextExpansionQuery(String tokensFieldType) throws IOException { String modelId = "text-expansion-test"; String indexName = modelId + "-index"; @@ -140,7 +148,7 @@ public void testTextExpansionQuery() throws IOException { } // index tokens - createRankFeaturesIndex(indexName); + createIndex(indexName, tokensFieldType); bulkIndexDocs(inputs, tokenWeights, indexName); // Test text expansion search against the indexed rank features @@ -157,7 +165,15 @@ public void testTextExpansionQuery() throws IOException { } } - public void testWithPipelineIngest() throws IOException { + public void testRankFeaturesWithPipelineIngest() throws IOException { + testWithPipelineIngest("rank_features"); + } + + public void testSparseVectorWithPipelineIngest() throws IOException { + testWithPipelineIngest("sparse_vector"); + } + + private void testWithPipelineIngest(String tokensFieldType) throws IOException { String modelId = "text-expansion-pipeline-test"; String indexName = modelId + "-index"; @@ -182,7 +198,7 @@ public void testWithPipelineIngest() throws IOException { ); // index tokens - createRankFeaturesIndex(indexName); + createIndex(indexName, tokensFieldType); var pipelineId = putPipeline(modelId); bulkIndexThroughPipeline(inputs, indexName, pipelineId); @@ -201,7 +217,15 @@ public void testWithPipelineIngest() throws IOException { } } - public void testWithDotsInTokenNames() throws IOException { + public void testRankFeaturesWithDotsInTokenNames() throws IOException { +
testWithDotsInTokenNames("rank_features"); + } + + public void testSparseVectorWithDotsInTokenNames() throws IOException { + testWithDotsInTokenNames("sparse_vector"); + } + + private void testWithDotsInTokenNames(String tokensFieldType) throws IOException { String modelId = "text-expansion-dots-in-tokens"; String indexName = modelId + "-index"; @@ -214,7 +238,7 @@ public void testWithDotsInTokenNames() throws IOException { List inputs = List.of("these are my words."); // index tokens - createRankFeaturesIndex(indexName); + createIndex(indexName, tokensFieldType); var pipelineId = putPipeline(modelId); bulkIndexThroughPipeline(inputs, indexName, pipelineId); @@ -278,18 +302,18 @@ protected void createTextExpansionModel(String modelId) throws IOException { client().performRequest(request); } - private void createRankFeaturesIndex(String indexName) throws IOException { + private void createIndex(String indexName, String tokensFieldType) throws IOException { Request createIndex = new Request("PUT", "/" + indexName); createIndex.setJsonEntity(""" - { - "mappings": { - "properties": { - "text_field": { - "type": "text" - }, - "ml.tokens": { - "type": "rank_features" - } + { + "mappings": { + "properties": { + "text_field": { + "type": "text" + }, + "ml.tokens": { + """ + "\"type\": \"" + tokensFieldType + "\"" + """ + } } } }"""); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index c8e4fd488394a..d2d6bd4fcb443 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -1227,7 +1227,8 @@ public Collection createComponents( threadPool, new NodeLoadDetector(memoryTracker), systemAuditor, - nodeAvailabilityZoneMapper + nodeAvailabilityZoneMapper, + client ) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java index d1d66299db67f..acd0a124d59c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java @@ -59,7 +59,7 @@ static void validateMinimumVersion(ModelPackageConfig resolvedModelPackageConfig if (MlConfigVersion.getMinMlConfigVersion(state.nodes()).before(minimumVersion)) { throw new ActionRequestValidationException().addValidationError( format( - "The model [%s] requires that all nodes are at least version [%s]", + "The model [%s] requires that all nodes have ML config version [%s] or higher", resolvedModelPackageConfig.getPackagedModelId(), resolvedModelPackageConfig.getMinimumVersion() ) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 6f97689222196..a0a2a81791550 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; +import 
org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -71,6 +72,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.ml.inference.deployment.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.utils.TaskRetriever; @@ -78,6 +80,7 @@ import java.time.Instant; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; @@ -132,7 +135,7 @@ protected void masterOperation( Task task, PutTrainedModelAction.Request request, ClusterState state, - ActionListener listener + ActionListener finalResponseListener ) { TrainedModelConfig config = request.getTrainedModelConfig(); try { @@ -140,7 +143,9 @@ protected void masterOperation( config.ensureParsedDefinition(xContentRegistry); } } catch (IOException ex) { - listener.onFailure(ExceptionsHelper.badRequestException("Failed to parse definition for [{}]", ex, config.getModelId())); + finalResponseListener.onFailure( + ExceptionsHelper.badRequestException("Failed to parse definition for [{}]", ex, config.getModelId()) + ); return; } @@ -150,7 +155,7 @@ protected void masterOperation( try { config.getModelDefinition().getTrainedModel().validate(); } catch (ElasticsearchException ex) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException("Definition for [{}] has validation failures.", ex, config.getModelId()) ); return; @@ -158,7 +163,7 @@ protected void masterOperation( TrainedModelType trainedModelType = TrainedModelType.typeFromTrainedModel(config.getModelDefinition().getTrainedModel()); if (trainedModelType == null) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "Unknown trained model definition class [{}]", config.getModelDefinition().getTrainedModel().getName() @@ -171,7 +176,7 @@ protected void masterOperation( // Set the model type from the definition config = new TrainedModelConfig.Builder(config).setModelType(trainedModelType).build(); } else if (trainedModelType != config.getModelType()) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "{} [{}] does not match the model definition type [{}]", TrainedModelConfig.MODEL_TYPE.getPreferredName(), @@ -183,7 +188,7 @@ protected void masterOperation( } if (config.getInferenceConfig().isTargetTypeSupported(config.getModelDefinition().getTrainedModel().targetType()) == false) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "Model [{}] inference config type [{}] does not support definition target type [{}]", config.getModelId(), @@ -196,7 +201,7 @@ protected void masterOperation( TransportVersion minCompatibilityVersion = config.getModelDefinition().getTrainedModel().getMinimalCompatibilityVersion(); if (state.getMinTransportVersion().before(minCompatibilityVersion)) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "Cannot create model [{}] while cluster upgrade is in progress.", config.getModelId() @@ -223,7 +228,7 @@ protected void 
masterOperation( } if (ModelAliasMetadata.fromState(state).getModelId(trainedModelConfig.getModelId()) != null) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "requested model_id [{}] is the same as an existing model_alias. model_aliases and ids must be unique", config.getModelId() @@ -233,7 +238,7 @@ protected void masterOperation( } if (TrainedModelAssignmentMetadata.fromState(state).hasDeployment(trainedModelConfig.getModelId())) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "Cannot create model [{}] the id is the same as a current model deployment", config.getModelId() @@ -242,6 +247,14 @@ protected void masterOperation( return; } + ActionListener finalResponseAction = ActionListener.wrap((configToReturn) -> { + finalResponseListener.onResponse(new PutTrainedModelAction.Response(configToReturn)); + }, finalResponseListener::onFailure); + + ActionListener verifyClusterAndModelArchitectures = ActionListener.wrap((configToReturn) -> { + verifyMlNodesAndModelArchitectures(configToReturn, client, threadPool, finalResponseAction); + }, finalResponseListener::onFailure); + ActionListener finishedStoringListener = ActionListener.wrap(bool -> { TrainedModelConfig configToReturn = trainedModelConfig.clearDefinition().build(); if (modelPackageConfigHolder.get() != null) { @@ -250,19 +263,19 @@ protected void masterOperation( modelPackageConfigHolder.get(), request.isWaitForCompletion(), ActionListener.wrap( - downloadTriggered -> listener.onResponse(new PutTrainedModelAction.Response(configToReturn)), - listener::onFailure + downloadTriggered -> verifyClusterAndModelArchitectures.onResponse(configToReturn), + finalResponseListener::onFailure ) ); } else { - listener.onResponse(new PutTrainedModelAction.Response(configToReturn)); + finalResponseListener.onResponse(new PutTrainedModelAction.Response(configToReturn)); } - }, listener::onFailure); + }, finalResponseListener::onFailure); var isPackageModel = config.isPackagedModel(); ActionListener checkStorageIndexSizeListener = ActionListener.wrap( r -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), finishedStoringListener, isPackageModel), - listener::onFailure + finalResponseListener::onFailure ); ActionListener tagsModelIdCheckListener = ActionListener.wrap(r -> { @@ -276,7 +289,7 @@ protected void masterOperation( IndexStats indexStats = stats.getIndices().get(InferenceIndexConstants.nativeDefinitionStore()); if (indexStats != null && indexStats.getTotal().getStore().getSizeInBytes() > MAX_NATIVE_DEFINITION_INDEX_SIZE.getBytes()) { - listener.onFailure( + finalResponseListener.onFailure( new ElasticsearchStatusException( "Native model store has exceeded the maximum acceptable size of {}, " + "please delete older unused pytorch models", @@ -293,7 +306,7 @@ protected void masterOperation( checkStorageIndexSizeListener.onResponse(null); return; } - listener.onFailure( + finalResponseListener.onFailure( new ElasticsearchStatusException( "Unable to calculate stats for definition storage index [{}], please try again later", RestStatus.SERVICE_UNAVAILABLE, @@ -305,11 +318,11 @@ protected void masterOperation( return; } checkStorageIndexSizeListener.onResponse(null); - }, listener::onFailure); + }, finalResponseListener::onFailure); ActionListener modelIdTagCheckListener = ActionListener.wrap( r -> checkTagsAgainstModelIds(request.getTrainedModelConfig().getTags(), tagsModelIdCheckListener), - listener::onFailure +
finalResponseListener::onFailure ); ActionListener handlePackageAndTagsListener = ActionListener.wrap(r -> { @@ -318,29 +331,61 @@ protected void masterOperation( try { TrainedModelValidator.validatePackage(trainedModelConfig, resolvedModelPackageConfig, state); } catch (ValidationException e) { - listener.onFailure(e); + finalResponseListener.onFailure(e); return; } modelPackageConfigHolder.set(resolvedModelPackageConfig); setTrainedModelConfigFieldsFromPackagedModel(trainedModelConfig, resolvedModelPackageConfig, xContentRegistry); checkModelIdAgainstTags(trainedModelConfig.getModelId(), modelIdTagCheckListener); - }, listener::onFailure)); + }, finalResponseListener::onFailure)); } else { checkModelIdAgainstTags(trainedModelConfig.getModelId(), modelIdTagCheckListener); } - }, listener::onFailure); + }, finalResponseListener::onFailure); checkForExistingTask( client, trainedModelConfig.getModelId(), request.isWaitForCompletion(), - listener, + finalResponseListener, handlePackageAndTagsListener, request.timeout() ); } + void verifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + Client client, + ThreadPool threadPool, + ActionListener configToReturnListener + ) { + ActionListener addWarningHeaderOnFailureListener = new ActionListener() { + @Override + public void onResponse(TrainedModelConfig config) { + assert Objects.equals(config, configToReturn); + configToReturnListener.onResponse(configToReturn); + } + + @Override + public void onFailure(Exception e) { + HeaderWarning.addWarning(e.getMessage()); + configToReturnListener.onResponse(configToReturn); + } + }; + + callVerifyMlNodesAndModelArchitectures(configToReturn, addWarningHeaderOnFailureListener, client, threadPool); + } + + void callVerifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + ActionListener failureListener, + Client client, + ThreadPool threadPool + ) { + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(failureListener, client, threadPool, configToReturn); + } + /** * This method is package private for testing */ diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 2a1844ea1fccf..ab215106c8ed0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -807,7 +807,7 @@ public static String nodeFilter(DiscoveryNode node, TaskParams params) { + id + "] on node [" + JobNodeSelector.nodeNameAndVersion(node) - + "], because the data frame analytics requires a node of version [" + + "], because the data frame analytics requires a node with ML config version [" + TaskParams.VERSION_INTRODUCED + "] or higher"; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java index 38b827b120bf6..475ca4ef2a7ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import 
org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -45,7 +46,6 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ml.MachineLearningField; -import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.GetDatafeedRunningStateAction; import org.elasticsearch.xpack.core.ml.action.NodeAcknowledgedResponse; @@ -80,7 +80,7 @@ import java.util.function.Predicate; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.core.common.validation.SourceDestValidator.REMOTE_CLUSTERS_CONFIG_TOO_OLD; +import static org.elasticsearch.xpack.core.common.validation.SourceDestValidator.REMOTE_CLUSTERS_TRANSPORT_TOO_OLD; /* This class extends from TransportMasterNodeAction for cluster state observing purposes. The stop datafeed api also redirect the elected master node. @@ -249,7 +249,7 @@ public void onFailure(Exception e) { checkRemoteConfigVersions( datafeedConfigHolder.get(), remoteAliases, - (cn) -> MlConfigVersion.fromVersion(remoteClusterService.getConnection(cn).getVersion()) + (cn) -> remoteClusterService.getConnection(cn).getTransportVersion() ); createDataExtractor(task, job, datafeedConfigHolder.get(), params, waitForTaskListener); } @@ -299,17 +299,17 @@ public void onFailure(Exception e) { static void checkRemoteConfigVersions( DatafeedConfig config, List remoteClusters, - Function configVersionSupplier + Function transportVersionSupplier ) { - Optional> minVersionAndReason = config.minRequiredConfigVersion(); + Optional> minVersionAndReason = config.minRequiredTransportVersion(); if (minVersionAndReason.isPresent() == false) { return; } final String reason = minVersionAndReason.get().v2(); - final MlConfigVersion minVersion = minVersionAndReason.get().v1(); + final TransportVersion minVersion = minVersionAndReason.get().v1(); List clustersTooOld = remoteClusters.stream() - .filter(cn -> configVersionSupplier.apply(cn).before(minVersion)) + .filter(cn -> transportVersionSupplier.apply(cn).before(minVersion)) .collect(Collectors.toList()); if (clustersTooOld.isEmpty()) { return; @@ -317,7 +317,7 @@ static void checkRemoteConfigVersions( throw ExceptionsHelper.badRequestException( Messages.getMessage( - REMOTE_CLUSTERS_CONFIG_TOO_OLD, + REMOTE_CLUSTERS_TRANSPORT_TOO_OLD, minVersion.toString(), reason, Strings.collectionToCommaDelimitedString(clustersTooOld) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index efc8bd84c6350..ea52c4918d05b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -15,6 +15,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -47,6 +48,7 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.autoscaling.NodeAvailabilityZoneMapper; import org.elasticsearch.xpack.ml.inference.assignment.planning.AllocationReducer; +import org.elasticsearch.xpack.ml.inference.deployment.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.ml.job.NodeLoad; import org.elasticsearch.xpack.ml.job.NodeLoadDetector; import org.elasticsearch.xpack.ml.notifications.SystemAuditor; @@ -78,6 +80,7 @@ public class TrainedModelAssignmentClusterService implements ClusterStateListene private final NodeLoadDetector nodeLoadDetector; private final SystemAuditor systemAuditor; private final NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper; + private final Client client; private volatile int maxMemoryPercentage; private volatile boolean useAuto; private volatile int maxOpenJobs; @@ -91,7 +94,8 @@ public TrainedModelAssignmentClusterService( ThreadPool threadPool, NodeLoadDetector nodeLoadDetector, SystemAuditor systemAuditor, - NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper + NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper, + Client client ) { this.clusterService = Objects.requireNonNull(clusterService); this.threadPool = Objects.requireNonNull(threadPool); @@ -104,6 +108,7 @@ public TrainedModelAssignmentClusterService( this.maxLazyMLNodes = MachineLearning.MAX_LAZY_ML_NODES.get(settings); this.maxMLNodeSize = MachineLearning.MAX_ML_NODE_SIZE.get(settings).getBytes(); this.allocatedProcessorsScale = MachineLearning.ALLOCATED_PROCESSORS_SCALE.get(settings); + this.client = client; // Only nodes that can possibly be master nodes really need this service running if (DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(this); @@ -150,14 +155,14 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String @Override public void clusterChanged(ClusterChangedEvent event) { - if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + if (eventStateHasGlobalBlockStateNotRecoveredBlock(event)) { return; } if (event.localNodeMaster() == false) { return; } - if (event.state().getMinTransportVersion().before(DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION)) { + if (eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(event)) { // we should not try to rebalance assignments while there may be nodes running on a version // prior to introducing distributed model allocation. // But we should remove routing to removed or shutting down nodes. 
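The TrainedModelAssignmentClusterService hunks that follow wire an architecture-heterogeneity check into clusterChanged: when nodes are added, the service collects the distinct platform architectures of the ML nodes and warns if more than one is present, since platform-specific models cannot be deployed on a mixed set. As a reading aid, here is a hedged sketch of that listener, equivalent to the anonymous ActionListener in the next hunk but written with ActionListener.wrap; the generics are reconstructed (they were stripped from the patch text), and `logger` and `format` are the class logger and org.elasticsearch.core.Strings.format already used in this file:

```java
// Logs a warning when ML nodes report more than one platform architecture,
// and logs the fetch failure otherwise; the result set comes from a NodesInfo call.
static ActionListener<Set<String>> getArchitecturesSetActionListener() {
    return ActionListener.wrap(architectures -> {
        if (architectures.size() > 1) {
            logger.warn(
                format(
                    "Heterogeneous platform architectures were detected among ML nodes. "
                        + "This will prevent the deployment of some trained models. "
                        + "Distinct platform architectures detected: %s",
                    architectures
                )
            );
        }
    }, e -> logger.error("Failed to detect heterogeneity among ML nodes with exception: ", e));
}
```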
@@ -165,6 +170,10 @@ public void clusterChanged(ClusterChangedEvent event) { return; } + if (event.nodesAdded()) { + logMlNodeHeterogeneity(); + } + Optional rebalanceReason = detectReasonToRebalanceModels(event); if (rebalanceReason.isPresent()) { // As this produces a cluster state update task, we are certain that if the persistent @@ -187,6 +196,42 @@ public void clusterChanged(ClusterChangedEvent event) { } } + boolean eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(ClusterChangedEvent event) { + return event.state().getMinTransportVersion().before(DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION); + } + + boolean eventStateHasGlobalBlockStateNotRecoveredBlock(ClusterChangedEvent event) { + return event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK); + } + + void logMlNodeHeterogeneity() { + ActionListener> architecturesListener = getArchitecturesSetActionListener(); + MlPlatformArchitecturesUtil.getMlNodesArchitecturesSet(architecturesListener, client, threadPool); + } + + static ActionListener> getArchitecturesSetActionListener() { + ActionListener> architecturesListener = new ActionListener>() { + @Override + public void onResponse(Set architectures) { + if (architectures.size() > 1) { + logger.warn( + format( + "Heterogeneous platform architectures were detected among ML nodes. " + + "This will prevent the deployment of some trained models. Distinct platform architectures detected: %s", + architectures + ) + ); + } + } + + @Override + public void onFailure(Exception e) { + logger.error("Failed to detect heterogeneity among ML nodes with exception: ", e); + } + }; + return architecturesListener; + } + private void removeRoutingToRemovedOrShuttingDownNodes(ClusterChangedEvent event) { if (areAssignedNodesRemoved(event)) { submitUnbatchedTask("removing routing entries for removed or shutting down nodes", new ClusterStateUpdateTask() { @@ -486,51 +531,89 @@ private void rebalanceAssignments( String reason, ActionListener listener ) { - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { - logger.debug(() -> format("Rebalancing model allocations because [%s]", reason)); - TrainedModelAssignmentMetadata.Builder rebalancedMetadata; - try { - rebalancedMetadata = rebalanceAssignments(clusterState, modelToAdd); - } catch (Exception e) { - listener.onFailure(e); - return; - } + ActionListener> architecturesListener = ActionListener.wrap((mlNodesArchitectures) -> { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + logger.debug(() -> format("Rebalancing model allocations because [%s]", reason)); + + TrainedModelAssignmentMetadata.Builder rebalancedMetadata; + try { + rebalancedMetadata = rebalanceAssignments(clusterState, modelToAdd); + } catch (Exception e) { + listener.onFailure(e); + return; + } - submitUnbatchedTask(reason, new ClusterStateUpdateTask() { + submitUnbatchedTask(reason, new ClusterStateUpdateTask() { - private volatile boolean isUpdated; - private volatile boolean isChanged; + private volatile boolean isUpdated; + private volatile boolean isChanged; - @Override - public ClusterState execute(ClusterState currentState) { + @Override + public ClusterState execute(ClusterState currentState) { - if (areClusterStatesCompatibleForRebalance(clusterState, currentState)) { - isUpdated = true; - ClusterState updatedState = update(currentState, rebalancedMetadata); - isChanged = updatedState != currentState; - return updatedState; + currentState = 
stopPlatformSpecificModelsInHeterogeneousClusters( + currentState, + mlNodesArchitectures, + modelToAdd, + clusterState + ); + + if (areClusterStatesCompatibleForRebalance(clusterState, currentState)) { + isUpdated = true; + ClusterState updatedState = update(currentState, rebalancedMetadata); + isChanged = updatedState != currentState; + return updatedState; + } + + rebalanceAssignments(currentState, modelToAdd, reason, listener); + return currentState; } - rebalanceAssignments(currentState, modelToAdd, reason, listener); - return currentState; - } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - if (isUpdated) { - if (isChanged) { - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute(() -> systemAuditor.info(Messages.getMessage(Messages.INFERENCE_DEPLOYMENT_REBALANCED, reason))); + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + if (isUpdated) { + if (isChanged) { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> systemAuditor.info(Messages.getMessage(Messages.INFERENCE_DEPLOYMENT_REBALANCED, reason)) + ); + } + listener.onResponse(TrainedModelAssignmentMetadata.fromState(newState)); } - listener.onResponse(TrainedModelAssignmentMetadata.fromState(newState)); } - } + }); }); - }); + }, listener::onFailure); + + MlPlatformArchitecturesUtil.getMlNodesArchitecturesSet(architecturesListener, client, threadPool); + } + + ClusterState stopPlatformSpecificModelsInHeterogeneousClusters( + ClusterState updatedState, + Set mlNodesArchitectures, + Optional modelToAdd, + ClusterState clusterState + ) { + if (mlNodesArchitectures.size() > 1 && modelToAdd.isPresent()) { + String reasonToStop = format( + "ML nodes in this cluster have multiple platform architectures, " + + "but can only have one for this model ([%s]); " + + "detected architectures: %s", + modelToAdd.get().getModelId(), + mlNodesArchitectures + ); + updatedState = callSetToStopping(reasonToStop, modelToAdd.get().getDeploymentId(), clusterState); + } + return updatedState; + } + + ClusterState callSetToStopping(String reasonToStop, String deploymentId, ClusterState clusterState) { + return setToStopping(clusterState, deploymentId, reasonToStop); } private boolean areClusterStatesCompatibleForRebalance(ClusterState source, ClusterState target) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index 03f34dacb1faf..fcb44d0f391fe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -164,9 +164,7 @@ public void startDeployment(TrainedModelDeploymentTask task, ActionListener getModelListener = ActionListener.wrap(getModelResponse -> { - assert getModelResponse.getResources().results().size() == 1; - TrainedModelConfig modelConfig = getModelResponse.getResources().results().get(0); + ActionListener getVerifiedModel = ActionListener.wrap((modelConfig) -> { processContext.modelInput.set(modelConfig.getInput()); if (modelConfig.getInferenceConfig() instanceof NlpConfig nlpConfig) { @@ -209,15 +207,57 @@ 
public void startDeployment(TrainedModelDeploymentTask task, ActionListener verifyModelAndClusterArchitecturesListener = ActionListener.wrap( + getModelResponse -> { + assert getModelResponse.getResources().results().size() == 1; + TrainedModelConfig modelConfig = getModelResponse.getResources().results().get(0); + + verifyMlNodesAndModelArchitectures(modelConfig, client, threadPool, getVerifiedModel); + + }, + failedDeploymentListener::onFailure + ); + executeAsyncWithOrigin( client, ML_ORIGIN, GetTrainedModelsAction.INSTANCE, new GetTrainedModelsAction.Request(task.getParams().getModelId()), - getModelListener + verifyModelAndClusterArchitecturesListener ); } + void verifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + Client client, + ThreadPool threadPool, + ActionListener configToReturnListener + ) { + ActionListener verifyConfigListener = new ActionListener() { + @Override + public void onResponse(TrainedModelConfig config) { + assert Objects.equals(config, configToReturn); + configToReturnListener.onResponse(configToReturn); + } + + @Override + public void onFailure(Exception e) { + configToReturnListener.onFailure(e); + } + }; + + callVerifyMlNodesAndModelArchitectures(configToReturn, verifyConfigListener, client, threadPool); + } + + void callVerifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + ActionListener configToReturnListener, + Client client, + ThreadPool threadPool + ) { + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(configToReturnListener, client, threadPool, configToReturn); + } + private SearchRequest vocabSearchRequest(VocabularyConfig vocabularyConfig, String modelId) { return client.prepareSearch(vocabularyConfig.getIndex()) .setQuery(new IdsQueryBuilder().addIds(VocabularyConfig.docId(modelId))) @@ -394,11 +434,11 @@ class ProcessContext { private final PyTorchResultProcessor resultProcessor; private final PyTorchStateStreamer stateStreamer; private final PriorityProcessWorkerExecutorService priorityProcessWorker; + private final AtomicInteger rejectedExecutionCount = new AtomicInteger(); + private final AtomicInteger timeoutCount = new AtomicInteger(); private volatile Instant startTime; private volatile Integer numThreadsPerAllocation; private volatile Integer numAllocations; - private final AtomicInteger rejectedExecutionCount = new AtomicInteger(); - private final AtomicInteger timeoutCount = new AtomicInteger(); private volatile boolean isStopped; private static final TimeValue COMPLETION_TIMEOUT = TimeValue.timeValueMinutes(3); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtil.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtil.java new file mode 100644 index 0000000000000..ff8ac1dbb3eec --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtil.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.inference.deployment; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.monitor.os.OsInfo; +import org.elasticsearch.plugins.Platforms; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.util.Iterator; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class MlPlatformArchitecturesUtil { + + public static void getMlNodesArchitecturesSet(ActionListener> architecturesListener, Client client, ThreadPool threadPool) { + ActionListener listener = MlPlatformArchitecturesUtil.getArchitecturesSetFromNodesInfoResponseListener( + threadPool, + architecturesListener + ); + + NodesInfoRequest request = MlPlatformArchitecturesUtil.getNodesInfoBuilderWithMlNodeArchitectureInfo(client).request(); + executeAsyncWithOrigin(client, ML_ORIGIN, NodesInfoAction.INSTANCE, request, listener); + } + + static ActionListener getArchitecturesSetFromNodesInfoResponseListener( + ThreadPool threadPool, + ActionListener> architecturesListener + ) { + return ActionListener.wrap(nodesInfoResponse -> { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + architecturesListener.onResponse(getArchitecturesSetFromNodesInfoResponse(nodesInfoResponse)); + }); + }, architecturesListener::onFailure); + } + + static NodesInfoRequestBuilder getNodesInfoBuilderWithMlNodeArchitectureInfo(Client client) { + return client.admin().cluster().prepareNodesInfo().clear().setNodesIds("ml:true").setOs(true).setPlugins(true); + } + + private static Set getArchitecturesSetFromNodesInfoResponse(NodesInfoResponse nodesInfoResponse) { + return nodesInfoResponse.getNodes() + .stream() + .filter(node -> node.getNode().hasRole(DiscoveryNodeRole.ML_ROLE.roleName())) + .map(node -> { + OsInfo osInfo = node.getInfo(OsInfo.class); + return Platforms.platformName(osInfo.getName(), osInfo.getArch()); + }) + .collect(Collectors.toUnmodifiableSet()); + } + + public static void verifyMlNodesAndModelArchitectures( + ActionListener successOrFailureListener, + Client client, + ThreadPool threadPool, + TrainedModelConfig configToReturn + ) { + String modelID = configToReturn.getModelId(); + String modelPlatformArchitecture = configToReturn.getPlatformArchitecture(); + + String modifiedPlatformArchitecture = (modelPlatformArchitecture == null && modelID.contains("linux-x86_64")) + ? 
"linux-x86_64" + : null; + ActionListener> architecturesListener = ActionListener.wrap((architectures) -> { + verifyMlNodesAndModelArchitectures(architectures, modifiedPlatformArchitecture, modelID); + successOrFailureListener.onResponse(configToReturn); + }, successOrFailureListener::onFailure); + + getMlNodesArchitecturesSet(architecturesListener, client, threadPool); + } + + static void verifyMlNodesAndModelArchitectures(Set architectures, String modelPlatformArchitecture, String modelID) + throws IllegalArgumentException, IllegalStateException { + + String architecture = null; + Iterator architecturesIterator = architectures.iterator(); + // If there are no ML nodes at all in the current cluster we assume that any that are added later will work + if (modelPlatformArchitecture == null || architectures.isEmpty() || architecturesIterator.hasNext() == false) { + return; + } + + if (architectures.size() > 1) { + throw new IllegalStateException( + format( + "ML nodes in this cluster have multiple platform architectures, but can only have one for this model ([%s]); " + + "expected [%s]; " + + "but was %s", + modelID, + modelPlatformArchitecture, + architectures + ) + ); + } + + if (Objects.equals(architecturesIterator.next(), modelPlatformArchitecture) == false) { + + throw new IllegalArgumentException( + format( + "The model being deployed ([%s]) is platform specific and incompatible with ML nodes in the cluster; " + + "expected [%s]; " + + "but was %s", + modelID, + modelPlatformArchitecture, + architectures + ) + ); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessor.java index 6b92a9349c4ea..4b925464d985b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessor.java @@ -107,17 +107,19 @@ public void process(PyTorchProcess process) { if (result.inferenceResult() != null) { processInferenceResult(result); - } - ThreadSettings threadSettings = result.threadSettings(); - if (threadSettings != null) { - threadSettingsConsumer.accept(threadSettings); + } else if (result.threadSettings() != null) { + threadSettingsConsumer.accept(result.threadSettings()); processThreadSettings(result); - } - if (result.ackResult() != null) { + } else if (result.ackResult() != null) { processAcknowledgement(result); - } - if (result.errorResult() != null) { + } else if (result.errorResult() != null) { processErrorResult(result); + } else { + // will should only get here if the native process + // has produced a partially valid result, one that + // is accepted by the parser but does not have any + // content + handleUnknownResultType(result); } } } catch (Exception e) { @@ -208,6 +210,26 @@ void processErrorResult(PyTorchResult result) { } } + void handleUnknownResultType(PyTorchResult result) { + if (result.requestId() != null) { + PendingResult pendingResult = pendingResults.remove(result.requestId()); + if (pendingResult == null) { + logger.error(() -> format("[%s] no pending result listener for unknown result type [%s]", modelId, result)); + } else { + String msg = format("[%s] pending result listener cannot handle unknown result type [%s]", modelId, result); + logger.error(msg); + var errorResult = new ErrorResult(msg); + 
pendingResult.listener.onResponse(new PyTorchResult(result.requestId(), null, null, null, null, null, errorResult)); + } + } else { + // Cannot look up the listener without a request id + // all that can be done in this case is log a message. + // The result parser requires a request id so this + // code should not be hit. + logger.error(() -> format("[%s] cannot process unknown result type [%s]", modelId, result)); + } + } + public synchronized ResultStats getResultStats() { long currentMs = currentTimeMsSupplier.getAsLong(); long currentPeriodStartTimeMs = startTime + Intervals.alignToFloor(currentMs - startTime, REPORTING_PERIOD_MS); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java index 2997af2e5a1a8..a24e671d1fe25 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.autoscaling.NativeMemoryCapacity; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; @@ -346,7 +347,7 @@ static String nodeNameOrId(DiscoveryNode node) { public static String nodeNameAndVersion(DiscoveryNode node) { String nodeNameOrID = nodeNameOrId(node); StringBuilder builder = new StringBuilder("{").append(nodeNameOrID).append('}'); - builder.append('{').append("version=").append(node.getVersion()).append('}'); + builder.append('{').append("ML config version=").append(MlConfigVersion.fromNode(node)).append('}'); return builder.toString(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index b19c0fb670a59..15b1993dc0586 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -179,7 +179,7 @@ public static String nodeFilter(DiscoveryNode node, Job job) { + jobId + "] on node [" + JobNodeSelector.nodeNameAndVersion(node) - + "], because the job's model snapshot requires a node of version [" + + "], because the job's model snapshot requires a node with ML config version [" + job.getModelSnapshotMinVersion() + "] or higher"; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java index d99147e6b4e98..f8755b282c6a1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java @@ -11,13 +11,16 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import 
org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfigTests; +import java.net.InetAddress; import java.util.Map; import static org.mockito.Mockito.mock; @@ -31,12 +34,14 @@ public void testValidateMinimumVersion() { .setMinimumVersion("9999.0.0") .build(); - DiscoveryNode node = mock(DiscoveryNode.class); final Map<String, String> attributes = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.CURRENT.toString()); - when(node.getAttributes()).thenReturn(attributes); - when(node.getVersion()).thenReturn(Version.CURRENT); - when(node.getMinIndexVersion()).thenReturn(IndexVersion.current()); - when(node.getId()).thenReturn("node1"); + DiscoveryNode node = DiscoveryNodeUtils.create( "node1name", "node1", new TransportAddress(InetAddress.getLoopbackAddress(), 9301), attributes, DiscoveryNodeRole.roles() ); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node).build(); @@ -52,7 +57,7 @@ public void testValidateMinimumVersion() { assertEquals( "Validation Failed: 1: The model [" + packageConfig.getPackagedModelId() - + "] requires that all nodes are at least version [9999.0.0];", + + "] requires that all nodes have ML config version [9999.0.0] or higher;", e.getMessage() ); } @@ -63,12 +68,11 @@ public void testValidateMinimumVersion() { ModelPackageConfigTests.randomModulePackageConfig() ).setMinimumVersion(MlConfigVersion.CURRENT.toString()).build(); - DiscoveryNode node = mock(DiscoveryNode.class); - final Map<String, String> attributes = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_7_0.toString()); - when(node.getAttributes()).thenReturn(attributes); - when(node.getVersion()).thenReturn(Version.V_8_7_0); - when(node.getMinIndexVersion()).thenReturn(IndexVersion.current()); - when(node.getId()).thenReturn("node1"); + DiscoveryNode node = DiscoveryNodeUtils.create( "node1", new TransportAddress(InetAddress.getLoopbackAddress(), 9300), Version.V_8_7_0 ); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node).build(); @@ -82,9 +86,9 @@ public void testValidateMinimumVersion() { assertEquals( "Validation Failed: 1: The model [" + packageConfigCurrent.getPackagedModelId() - + "] requires that all nodes are at least version [" + + "] requires that all nodes have ML config version [" + MlConfigVersion.CURRENT - + "];", + + "] or higher;", e.getMessage() ); } @@ -95,12 +99,11 @@ public void testValidateMinimumVersion() { ModelPackageConfigTests.randomModulePackageConfig() ).setMinimumVersion("_broken_version_").build(); - DiscoveryNode node = mock(DiscoveryNode.class); - final Map<String, String> attributes = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_7_0.toString()); - when(node.getAttributes()).thenReturn(attributes); - when(node.getVersion()).thenReturn(Version.V_8_7_0); - when(node.getMinIndexVersion()).thenReturn(IndexVersion.current()); - when(node.getId()).thenReturn("node1"); + DiscoveryNode node = DiscoveryNodeUtils.create( "node1", new TransportAddress(InetAddress.getLoopbackAddress(), 9300), Version.V_8_7_0 ); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node).build(); diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java index 514a1e2243531..f708ef1fb2959 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java @@ -11,9 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; @@ -21,6 +24,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -50,6 +54,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigTests; import org.junit.After; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.io.IOException; import java.util.Collections; @@ -65,6 +70,11 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; public class TransportPutTrainedModelActionTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); @@ -205,6 +215,42 @@ public void testCheckForExistingTaskReturnsTask() { assertThat(returnedModel.getResponse().getModelId(), is(trainedModel.getModelId())); } + public void testVerifyMlNodesAndModelArchitectures_GivenIllegalArgumentException_ThenSetHeaderWarning() { + + TransportPutTrainedModelAction actionSpy = spy(createTransportPutTrainedModelAction()); + @SuppressWarnings("unchecked") + ArgumentCaptor<ActionListener<TrainedModelConfig>> failureListener = ArgumentCaptor.forClass(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener<TrainedModelConfig> mockConfigToReturnListener = mock(ActionListener.class); + TrainedModelConfig mockConfigToReturn = mock(TrainedModelConfig.class); + doNothing().when(mockConfigToReturnListener).onResponse(any()); + + doNothing().when(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), any(), any(), any()); + actionSpy.verifyMlNodesAndModelArchitectures(mockConfigToReturn, null, threadPool, mockConfigToReturnListener); + verify(actionSpy).verifyMlNodesAndModelArchitectures(any(), any(), any(), any()); + verify(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), failureListener.capture(), any(), any()); + + String warningMessage = "TEST HEADER WARNING"; + 
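// failing the captured listener should be surfaced as a response header warning, checked via assertWarnings below + 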
failureListener.getValue().onFailure(new IllegalArgumentException(warningMessage)); + assertWarnings(warningMessage); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenArchitecturesMatch_ThenTriggerOnResponse() { + + TransportPutTrainedModelAction actionSpy = spy(createTransportPutTrainedModelAction()); + @SuppressWarnings("unchecked") + ArgumentCaptor<ActionListener<TrainedModelConfig>> successListener = ArgumentCaptor.forClass(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener<TrainedModelConfig> mockConfigToReturnListener = mock(ActionListener.class); + TrainedModelConfig mockConfigToReturn = mock(TrainedModelConfig.class); + + doNothing().when(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), any(), any(), any()); + actionSpy.verifyMlNodesAndModelArchitectures(mockConfigToReturn, null, threadPool, mockConfigToReturnListener); + verify(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), successListener.capture(), any(), any()); + + ensureNoWarnings(); + } + private static void prepareGetTrainedModelResponse(Client client, List<TrainedModelConfig> trainedModels) { doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") @@ -220,6 +266,30 @@ private static void prepareGetTrainedModelResponse(Client client, List<TrainedModelConfig> - Map<String, MlConfigVersion> clusterVersions = Map.of( + Map<String, TransportVersion> clusterVersions = Map.of( "modern_cluster_1", - MlConfigVersion.CURRENT, + TransportVersion.current(), "modern_cluster_2", - MlConfigVersion.CURRENT, + TransportVersion.current(), "old_cluster_1", - MlConfigVersion.V_7_0_0 + TransportVersions.V_7_0_0 ); Map<String, Object> field = Map.of("runtime_field_foo", Map.of("type", "keyword", "script", "")); @@ -137,7 +138,7 @@ public void testRemoteClusterVersionCheck() { assertThat( ex.getMessage(), containsString( - "remote clusters are expected to run at least config version [7.11.0] (reason: [runtime_mappings]), " + "remote clusters are expected to run at least transport version [7110099] (reason: [runtime_mappings]), " + "but the following clusters were too old: [old_cluster_1]" ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java index d37edcd85946a..9e9266c8302e8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java @@ -7,12 +7,18 @@ package org.elasticsearch.xpack.ml.inference.assignment; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.message.Message; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -27,10 +33,13 @@ import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.cluster.version.CompatibilityVersionsUtils; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Strings; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -55,17 +64,24 @@ import org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutorTests; import org.elasticsearch.xpack.ml.notifications.SystemAuditor; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; +import org.elasticsearch.xpack.ml.test.MockAppender; +import org.junit.After; import org.junit.Before; +import org.mockito.Mockito; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import java.util.function.Function; import static java.util.Map.entry; +import static org.elasticsearch.core.Strings.format; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsString; @@ -77,7 +93,13 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { @@ -87,9 +109,12 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { private NodeLoadDetector nodeLoadDetector; private SystemAuditor systemAuditor; private NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper; + private Client client; + private static MockAppender appender; + private static Logger testLogger1 = LogManager.getLogger(TrainedModelAssignmentClusterService.class); @Before - public void setupObjects() { + public void setupObjects() throws IllegalAccessException { clusterService = mock(ClusterService.class); ClusterSettings clusterSettings = new ClusterSettings( Settings.EMPTY, @@ -111,6 +136,122 @@ public void setupObjects() { nodeLoadDetector = new NodeLoadDetector(memoryTracker); systemAuditor = mock(SystemAuditor.class); + client = mock(Client.class); + + appender = new MockAppender("trace_appender"); + appender.start(); + Loggers.addAppender(testLogger1, appender); + } + + @After + public void cleanup() { + appender.stop(); + Loggers.removeAppender(testLogger1, appender); + } + + public void testLogMlNodeHeterogeneity_GivenZeroOrOneArchitectures_ThenNothing() throws InterruptedException { + Set<String> architecturesSet = new HashSet<>(randomList(0, 1, () -> randomAlphaOfLength(10))); + + final ActionListener<Set<String>> underTestListener = TrainedModelAssignmentClusterService.getArchitecturesSetActionListener(); + + underTestListener.onResponse(architecturesSet); + + LogEvent lastEvent = appender.getLastEventAndReset(); + assertNull(lastEvent); + } + + 
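// two distinct architectures are expected to produce a single WARN-level heterogeneity message + 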
public void testLogMlNodeHeterogeneity_GivenTwoArchitectures_ThenWarn() throws InterruptedException { + String nodeArch = randomAlphaOfLength(10); + Set<String> architecturesSet = Set.of(nodeArch, nodeArch + "2"); // architectures must be different + + final ActionListener<Set<String>> underTestListener = TrainedModelAssignmentClusterService.getArchitecturesSetActionListener(); + underTestListener.onResponse(architecturesSet); + + LogEvent lastEvent = appender.getLastEventAndReset(); + + assertEquals(Level.WARN, lastEvent.getLevel()); + + Message m = lastEvent.getMessage(); + String fm = m.getFormattedMessage(); + String expected = Strings.format( "Heterogeneous platform architectures were detected among ML nodes. " + "This will prevent the deployment of some trained models. Distinct platform architectures detected: %s", architecturesSet ); + + assertEquals(expected, fm); + } + + public void testLogMlNodeHeterogeneity_GivenFailure_ThenError() throws InterruptedException { + RuntimeException e = new RuntimeException("Test Runtime Exception"); + final ActionListener<Set<String>> underTestListener = TrainedModelAssignmentClusterService.getArchitecturesSetActionListener(); + underTestListener.onFailure(e); + + LogEvent lastEvent = appender.getLastEventAndReset(); + + assertEquals(Level.ERROR, lastEvent.getLevel()); + + Message m = lastEvent.getMessage(); + String fm = m.getFormattedMessage(); + + assertEquals("Failed to detect heterogeneity among ML nodes with exception: ", fm); + assertEquals(e, lastEvent.getThrown()); + } + + public void testClusterChanged_GivenNodesAdded_ThenLogMlNodeHeterogeneityCalled() { + nodeAvailabilityZoneMapper = mock(NodeAvailabilityZoneMapper.class); + TrainedModelAssignmentClusterService serviceSpy = spy(createClusterService(randomInt(5))); + doNothing().when(serviceSpy).logMlNodeHeterogeneity(); + doReturn(false).when(serviceSpy).eventStateHasGlobalBlockStateNotRecoveredBlock(any()); + doReturn(false).when(serviceSpy).eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(any()); + + ClusterChangedEvent mockNodesAddedEvent = mock(ClusterChangedEvent.class); + ClusterState mockState = mock(ClusterState.class); + doReturn(mockState).when(mockNodesAddedEvent).state(); + Metadata mockMetadata = mock(Metadata.class); + doReturn(mockMetadata).when(mockState).getMetadata(); + doReturn(null).when(mockState).custom(anyString()); + + doReturn(true).when(mockNodesAddedEvent).localNodeMaster(); + doReturn(true).when(mockNodesAddedEvent).nodesAdded(); + + serviceSpy.clusterChanged(mockNodesAddedEvent); + Mockito.verify(serviceSpy).logMlNodeHeterogeneity(); + Mockito.verify(mockNodesAddedEvent).nodesAdded(); + } + + public void testStopPlatformSpecificModelsInHeterogeneousClusters_GivenMultipleMlNodeArchitectures_ThenCallSetToStopping() { + nodeAvailabilityZoneMapper = mock(NodeAvailabilityZoneMapper.class); + TrainedModelAssignmentClusterService serviceSpy = spy(createClusterService(randomInt(5))); + + Set<String> architecturesSet = new HashSet<>(randomList(2, 5, () -> randomAlphaOfLength(10))); + ClusterState mockUpdatedState = mock(ClusterState.class); + ClusterState mockClusterState = mock(ClusterState.class); + StartTrainedModelDeploymentAction.TaskParams mockModelToAdd = mock(StartTrainedModelDeploymentAction.TaskParams.class); + Optional<StartTrainedModelDeploymentAction.TaskParams> optionalModelToAdd = Optional.of(mockModelToAdd); + String modelId = randomAlphaOfLength(10); + String deploymentId = randomAlphaOfLength(10); + when(mockModelToAdd.getModelId()).thenReturn(modelId); + 
when(mockModelToAdd.getDeploymentId()).thenReturn(deploymentId); + + String reasonToStop = format( "ML nodes in this cluster have multiple platform architectures, " + "but can only have one for this model ([%s]); " + "detected architectures: %s", modelId, architecturesSet ); + + doReturn(mockUpdatedState).when(serviceSpy).callSetToStopping(reasonToStop, deploymentId, mockClusterState); + + ClusterState updatedMockClusterState = serviceSpy.stopPlatformSpecificModelsInHeterogeneousClusters( mockUpdatedState, architecturesSet, optionalModelToAdd, mockClusterState ); + + verify(serviceSpy).callSetToStopping(reasonToStop, deploymentId, mockClusterState); } public void testUpdateModelRoutingTable() { @@ -1878,7 +2019,8 @@ private TrainedModelAssignmentClusterService createClusterService(int maxLazyNod threadPool, nodeLoadDetector, systemAuditor, - nodeAvailabilityZoneMapper + nodeAvailabilityZoneMapper, + client ); } @@ -1948,4 +2090,36 @@ private static StartTrainedModelDeploymentAction.TaskParams newParams( ); } + protected <T> void assertAsync( + Consumer<ActionListener<T>> function, + T expected, + CheckedConsumer<T, Exception> onAnswer, + Consumer<Exception> onException + ) throws InterruptedException { + + CountDownLatch latch = new CountDownLatch(1); + + LatchedActionListener<T> listener = new LatchedActionListener<>(ActionListener.wrap(r -> { + if (expected == null) { + fail("expected an exception but got a response"); + } else { + assertThat(r, equalTo(expected)); + } + if (onAnswer != null) { + onAnswer.accept(r); + } + }, e -> { + if (onException == null) { + logger.error("got unexpected exception", e); + fail("got unexpected exception: " + e.getMessage()); + } else { + onException.accept(e); + } + }), latch); + + function.accept(listener); + latch.countDown(); + assertTrue("timed out after 20s", latch.await(20, TimeUnit.SECONDS)); + } + } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java index 0bc898f434030..028c4b48ad355 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java @@ -108,4 +108,5 @@ public void testRejectedExecution() { assertThat(rejectedCount.intValue(), equalTo(1)); } + } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtilTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtilTests.java new file mode 100644 index 0000000000000..28fc3db10cbe8 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtilTests.java @@ -0,0 +1,202 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.inference.deployment; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.monitor.os.OsInfo; +import org.elasticsearch.plugins.Platforms; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MlPlatformArchitecturesUtilTests extends ESTestCase { + + public void testGetNodesOsArchitectures() throws InterruptedException { + var threadPool = mock(ThreadPool.class); + var mockExecutorService = mock(ExecutorService.class); + doNothing().when(mockExecutorService).execute(any()); + when(threadPool.executor(anyString())).thenReturn(mockExecutorService); + + var mockNodesInfoResponse = mock(NodesInfoResponse.class); + List<NodeInfo> nodeInfoList = randomNodeInfos(4); + when(mockNodesInfoResponse.getNodes()).thenReturn(nodeInfoList); + + var expected = nodeInfoList.stream().filter(node -> node.getNode().hasRole(DiscoveryNodeRole.ML_ROLE.roleName())).map(node -> { + OsInfo osInfo = node.getInfo(OsInfo.class); + return Platforms.platformName(osInfo.getName(), osInfo.getArch()); + }).collect(Collectors.toUnmodifiableSet()); + + assertAsync(new Consumer<ActionListener<Set<String>>>() { + @Override + public void accept(ActionListener<Set<String>> setActionListener) { + final ActionListener<NodesInfoResponse> nodesInfoResponseActionListener = MlPlatformArchitecturesUtil .getArchitecturesSetFromNodesInfoResponseListener(threadPool, setActionListener); + nodesInfoResponseActionListener.onResponse(mockNodesInfoResponse); + } + + }, expected, null, null); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenNullModelArchitecture_ThenNothing() { + var architectures = nArchitectures(randomIntBetween(2, 10)); + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, null, randomAlphaOfLength(10)); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenZeroArches_ThenNothing() { + var architectures = new HashSet<String>(); + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, randomAlphaOfLength(10), randomAlphaOfLength(10)); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenOneArchMatches_ThenNothing() { + Set<String> architectures = nArchitectures(1); + String architecture = architectures.iterator().next(); + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, architecture, randomAlphaOfLength(10)); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenAtLeastTwoArches_ThenThrowsISE() { + var architectures = nArchitectures(randomIntBetween(2, 10)); + var modelId = randomAlphaOfLength(10); + var requiredArch = randomAlphaOfLength(10); + 
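// the expected text below mirrors the IllegalStateException message built in MlPlatformArchitecturesUtil + 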
String message = "ML nodes in this cluster have multiple platform architectures, " + "but can only have one for this model ([" + modelId + "]); " + "expected [" + requiredArch + "]; but was " + architectures + ""; + + Throwable exception = expectThrows( IllegalStateException.class, "Expected IllegalStateException but no exception was thrown", () -> MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, requiredArch, modelId) ); + assertEquals(exception.getMessage(), message); + } + + public void testVerifyArchitectureMatchesModelPlatformArchitecture_GivenRequiredArchMatches_ThenNothing() { + var requiredArch = randomAlphaOfLength(10); + + var modelId = randomAlphaOfLength(10); + + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures( new HashSet<>(Collections.singleton(requiredArch)), requiredArch, modelId ); + } + + public void testVerifyArchitectureMatchesModelPlatformArchitecture_GivenRequiredArchDoesNotMatch_ThenThrowsIAE() { + var requiredArch = randomAlphaOfLength(10); + String architecturesStr = requiredArch + "-DIFFERENT"; + + var modelId = randomAlphaOfLength(10); + String message = "The model being deployed ([" + modelId + "]) is platform specific and incompatible with ML nodes in the cluster; " + "expected [" + requiredArch + "]; but was [" + architecturesStr + "]"; + + Throwable exception = expectThrows( IllegalArgumentException.class, "Expected IllegalArgumentException but no exception was thrown", () -> MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(Set.of(architecturesStr), requiredArch, modelId) ); + assertEquals(exception.getMessage(), message); + } + + private Set<String> nArchitectures(Integer n) { + Set<String> architectures = new HashSet<>(n); + for (int i = 0; i < n; i++) { + architectures.add(randomAlphaOfLength(10)); + } + return architectures; + } + + private List<NodeInfo> randomNodeInfos(int max) { + assertTrue(max > 0); + int n = randomInt(max); + List<NodeInfo> nodeInfos = new ArrayList<>(n); + for (int i = 0; i < n; i++) { + nodeInfos.add(mockNodeInfo()); + } + return nodeInfos; + } + + private NodeInfo mockNodeInfo() { + var mockNodeInfo = mock(NodeInfo.class); + var mockDiscoveryNode = mock(DiscoveryNode.class); + when(mockNodeInfo.getNode()).thenReturn(mockDiscoveryNode); + when(mockDiscoveryNode.hasRole(DiscoveryNodeRole.ML_ROLE.roleName())).thenReturn(randomBoolean()); + var mockOsInfo = mock(OsInfo.class); + when(mockNodeInfo.getInfo(OsInfo.class)).thenReturn(mockOsInfo); + when(mockOsInfo.getArch()).thenReturn(randomAlphaOfLength(10)); + when(mockOsInfo.getName()).thenReturn(randomAlphaOfLength(10)); + + return mockNodeInfo; + } + + protected <T> void assertAsync( + Consumer<ActionListener<T>> function, + T expected, + CheckedConsumer<T, Exception> onAnswer, + Consumer<Exception> onException + ) throws InterruptedException { + + CountDownLatch latch = new CountDownLatch(1); + + LatchedActionListener<T> listener = new LatchedActionListener<>(ActionListener.wrap(r -> { + if (expected == null) { + fail("expected an exception but got a response"); + } else { + assertThat(r, equalTo(expected)); + } + if (onAnswer != null) { + onAnswer.accept(r); + } + }, e -> { + if (onException == null) { + logger.error("got unexpected exception", e); + fail("got unexpected exception: " + e.getMessage()); + } else { + onException.accept(e); + } + }), latch); + + function.accept(listener); + latch.countDown(); + assertTrue("timed out after 20s", latch.await(20, TimeUnit.SECONDS)); + } +} 
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessorTests.java index e172f4ffb528c..860da3140f4fe 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessorTests.java @@ -153,6 +153,23 @@ public void testPendingRequestAreCalledAtShutdown() { } } + public void testHandleUnknownResult() { + var processor = new PyTorchResultProcessor("deployment-foo", settings -> {}); + var listener = new AssertingResultListener( r -> assertThat( r.errorResult().error(), containsString("[deployment-foo] pending result listener cannot handle unknown result type") ) ); + + processor.registerRequest("no-result-content", listener); + + processor.process( mockNativeProcess(List.of(new PyTorchResult("no-result-content", null, null, null, null, null, null)).iterator()) ); + assertTrue(listener.hasResponse); + } + private static class AssertingResultListener implements ActionListener<PyTorchResult> { boolean hasResponse; final Consumer<PyTorchResult> responseAsserter; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java index 19546b37c00cd..ab815aad543b8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java @@ -42,7 +42,6 @@ import java.util.Collection; import java.util.Collections; import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -81,12 +80,25 @@ public void setup() { when(memoryTracker.getJobMemoryRequirement(anyString(), anyString())).thenReturn(JOB_MEMORY_REQUIREMENT.getBytes()); } - public void testNodeNameAndVersion() { + public void testNodeNameAndVersionForRecentNode() { TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); - Map<String, String> attributes = new HashMap<>(); - attributes.put("unrelated", "attribute"); + Map<String, String> attributes = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, "10.0.0", "unrelated", "attribute"); DiscoveryNode node = DiscoveryNodeUtils.create("_node_name1", "_node_id1", ta, attributes, ROLES_WITHOUT_ML); - assertEquals("{_node_name1}{version=" + node.getVersion() + "}", JobNodeSelector.nodeNameAndVersion(node)); + assertEquals("{_node_name1}{ML config version=10.0.0}", JobNodeSelector.nodeNameAndVersion(node)); + } + + public void testNodeNameAndVersionForOldNode() { + TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); + Map<String, String> attributes = Map.of("unrelated", "attribute"); + DiscoveryNode node = new DiscoveryNode( "_node_name2", "_node_id2", ta, attributes, ROLES_WITH_ML, VersionInformation.inferVersions(Version.V_8_7_0) ); + assertEquals("{_node_name2}{ML config version=8.7.0}", JobNodeSelector.nodeNameAndVersion(node)); } public void testNodeNameAndMlAttributes() { @@ -869,12 +881,12 @@ public void testSelectLeastLoadedMlNode_reasonsAreInDeterministicOrder() { assertThat( result.getExplanation(), equalTo( - "Not opening job [incompatible_type_job] on node [{_node_name1}{version=" - + Version.CURRENT + "Not opening job [incompatible_type_job] on node [{_node_name1}{ML config version=" + + MlConfigVersion.CURRENT + "}], " + "because this node does not support jobs of type [incompatible_type]|" - + "Not opening job [incompatible_type_job] on node [{_node_name2}{version=" - + Version.CURRENT + + "Not opening job [incompatible_type_job] on node [{_node_name2}{ML config version=" + + MlConfigVersion.CURRENT + "}], " + "because this node does not support jobs of type [incompatible_type]" ) @@ -946,7 +958,10 @@ public void testSelectLeastLoadedMlNode_noNodesMatchingModelSnapshotMinVersion() node -> nodeFilter(node, job) ); PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(10, 2, 30, MAX_JOB_BYTES, false); - assertThat(result.getExplanation(), containsString("job's model snapshot requires a node of version [7.3.0] or higher")); + assertThat( result.getExplanation(), containsString("job's model snapshot requires a node with ML config version [7.3.0] or higher") ); assertNull(result.getExecutorNode()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/MockAppender.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/MockAppender.java new file mode 100644 index 0000000000000..99c3c58f4ee81 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/MockAppender.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.test; + +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.filter.RegexFilter; +import org.apache.logging.log4j.message.Message; + +public class MockAppender extends AbstractAppender { + public LogEvent lastEvent; + + public MockAppender(final String name) throws IllegalAccessException { + super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null, false); + } + + @Override + public void append(LogEvent event) { + lastEvent = event.toImmutable(); + } + + Message lastMessage() { + return lastEvent.getMessage(); + } + + public LogEvent getLastEventAndReset() { + LogEvent toReturn = lastEvent; + lastEvent = null; + return toReturn; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java index 91224a8246169..b1ee1b77998ec 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java @@ -17,7 +17,6 @@ import com.nimbusds.jose.jwk.JWK; import com.nimbusds.jose.jwk.OctetSequenceKey; import com.nimbusds.jose.jwk.RSAKey; -import com.nimbusds.jose.util.Base64URL; import com.nimbusds.jwt.SignedJWT; import org.apache.logging.log4j.LogManager; @@ -40,9 +39,10 @@ import java.util.Arrays; import java.util.List; -import java.util.function.Supplier; import java.util.stream.Stream; +import static org.elasticsearch.xpack.security.authc.jwt.JwtUtil.toStringRedactSignature; + public interface JwtSignatureValidator extends Releasable { Logger logger = 
LogManager.getLogger(JwtSignatureValidator.class); @@ -361,7 +361,7 @@ default void validateSignature(final SignedJWT jwt, final List<JWK> jwks) throws final String id = jwt.getHeader().getKeyID(); final JWSAlgorithm alg = jwt.getHeader().getAlgorithm(); - tracer.append("Filtering [{}] possible JWKs to verifying signature for JWT [{}].", jwks.size(), getSafePrintableJWT(jwt)); + tracer.append("Filtering [{}] possible JWKs to verifying signature for JWT [{}].", jwks.size(), toStringRedactSignature(jwt)); // If JWT has optional kid header, and realm JWKs have optional kid attribute, any mismatches JWT.kid vs JWK.kid can be ignored. // Keep any JWKs if JWK optional kid attribute is missing. Keep all JWKs if JWT optional kid header is missing. @@ -399,7 +399,11 @@ default void validateSignature(final SignedJWT jwt, final List<JWK> jwks) throws int attempt = 0; int maxAttempts = jwksConfigured.size(); - tracer.append("Attempting to verify signature for JWT [{}] against [{}] possible JWKs.", getSafePrintableJWT(jwt), maxAttempts); + tracer.append( "Attempting to verify signature for JWT [{}] against [{}] possible JWKs.", toStringRedactSignature(jwt), maxAttempts ); for (final JWK jwk : jwksConfigured) { attempt++; if (jwt.verify(createJwsVerifier(jwk))) { @@ -429,7 +433,7 @@ default void validateSignature(final SignedJWT jwt, final List<JWK> jwks) throws ); } } - throw new ElasticsearchException("JWT [" + getSafePrintableJWT(jwt).get() + "] signature verification failed."); + throw new ElasticsearchException("JWT [" + toStringRedactSignature(jwt).get() + "] signature verification failed."); } } @@ -458,15 +462,4 @@ interface PkcJwkSetReloadNotifier { void reloaded(); } - /** - * @param jwt The signed JWT - * @return A print safe supplier to describe a JWT that redacts the signature. While the signature is not generally sensitive, - * we don't want to leak the entire JWT to the log to avoid a possible replay. - */ - private Supplier<String> getSafePrintableJWT(SignedJWT jwt) { - Base64URL[] parts = jwt.getParsedParts(); - assert parts.length == 3; - return () -> parts[0].toString() + "." + parts[1].toString() + "."; - } - } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java index 3e3533f028b38..9168c5c0925bd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.security.authc.jwt; +import com.nimbusds.jose.JWSObject; import com.nimbusds.jose.jwk.JWK; import com.nimbusds.jose.jwk.JWKSet; +import com.nimbusds.jose.util.Base64URL; import com.nimbusds.jose.util.JSONObjectUtils; +import com.nimbusds.jwt.JWT; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; @@ -59,6 +62,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.function.Supplier; import javax.net.ssl.HostnameVerifier; @@ -389,4 +393,23 @@ public void close() { closed = true; } } + + /** + * @param jwt The signed JWT + * @return A print safe supplier to describe a JWT that redacts the signature. While the signature is not generally sensitive, + * we don't want to leak the entire JWT to the log to avoid a possible replay. 
+ */ + public static Supplier<String> toStringRedactSignature(JWT jwt) { + if (jwt instanceof JWSObject) { + Base64URL[] parts = jwt.getParsedParts(); + assert parts.length == 3; + assert parts[0] != null; + assert parts[1] != null; + assert parts[2] != null; + assert Objects.equals(parts[2], ((JWSObject) jwt).getSignature()); + return () -> parts[0] + "." + parts[1] + "."; + } else { + return jwt::getParsedString; + } + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java index 73bc36c94e2d5..754d2a82dd835 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java @@ -93,6 +93,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.authc.jwt.JwtUtil; import java.io.IOException; import java.net.URI; @@ -293,14 +294,18 @@ void getUserClaims( .triggerReload(ActionListener.wrap(v -> { getUserClaims(accessToken, idToken, expectedNonce, false, claimsListener); }, ex -> { - LOGGER.trace("Attempted and failed to refresh JWK cache upon token validation failure", e); + LOGGER.debug("Attempted and failed to refresh JWK cache upon token validation failure", e); claimsListener.onFailure(ex); })); } else { + LOGGER.debug("Failed to parse or validate the ID Token", e); claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e)); } } catch (com.nimbusds.oauth2.sdk.ParseException | ParseException | JOSEException e) { - LOGGER.debug("ID Token: [{}], Nonce: [{}]", idToken.getParsedString(), expectedNonce); + LOGGER.debug( () -> format("ID Token: [%s], Nonce: [%s]", JwtUtil.toStringRedactSignature(idToken).get(), expectedNonce.toString()), e ); claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e)); } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java index ac1b2f30ec06d..708621ee4a6c8 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java @@ -91,15 +91,6 @@ public SnapshotRetentionTask( this.historyStore = historyStore; } - private static String formatSnapshots(Map<String, List<SnapshotInfo>> snapshotMap) { - return snapshotMap.entrySet() .stream() .map( e -> e.getKey() + ": [" + e.getValue().stream().map(si -> si.snapshotId().getName()).collect(Collectors.joining(",")) + "]" ) .collect(Collectors.joining(",")); - } - @Override public void triggered(SchedulerEngine.Event event) { assert event.getJobName().equals(SnapshotRetentionService.SLM_RETENTION_JOB_ID) @@ -156,28 +147,9 @@ public void triggered(SchedulerEngine.Event event) { // Finally, asynchronously retrieve all the snapshots, deleting them serially, // before updating the cluster state with the new metrics and setting 'running' // back to false - getAllRetainableSnapshots(repositioriesToFetch, policiesWithRetention.keySet(), new ActionListener<>() { + getSnapshotsEligibleForDeletion(repositioriesToFetch, policiesWithRetention, new ActionListener<>() { 
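+ // the callback now receives only the snapshots already narrowed down to deletion candidates 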
@Override - public void onResponse(Map<String, List<SnapshotInfo>> allSnapshots) { - if (logger.isTraceEnabled()) { - logger.trace("retrieved snapshots: [{}]", formatSnapshots(allSnapshots)); - } - // Find all the snapshots that are past their retention date - final Map<String, List<Tuple<SnapshotId, String>>> snapshotsToBeDeleted = allSnapshots.entrySet() - .stream() - .collect( - Collectors.toMap( - Map.Entry::getKey, - e -> e.getValue() - .stream() - .filter(snapshot -> snapshotEligibleForDeletion(snapshot, allSnapshots, policiesWithRetention)) - // SnapshotInfo instances can be quite large in case they contain e.g. a large collection of - // exceptions so we extract the only two things (id + policy id) here so they can be GCed - .map(snapshotInfo -> Tuple.tuple(snapshotInfo.snapshotId(), getPolicyId(snapshotInfo))) - .toList() - ) - ); - + public void onResponse(Map<String, List<Tuple<SnapshotId, String>>> snapshotsToBeDeleted) { if (logger.isTraceEnabled()) { logger.trace("snapshots eligible for deletion: [{}]", snapshotsToBeDeleted); } @@ -256,10 +228,10 @@ static boolean snapshotEligibleForDeletion( return eligible; } - void getAllRetainableSnapshots( + void getSnapshotsEligibleForDeletion( Collection<String> repositories, - Set<String> policies, - ActionListener<Map<String, List<SnapshotInfo>>> listener + Map<String, SnapshotLifecyclePolicy> policies, + ActionListener<Map<String, List<Tuple<SnapshotId, String>>>> listener ) { if (repositories.isEmpty()) { // Skip retrieving anything if there are no repositories to fetch @@ -273,7 +245,7 @@ void getAllRetainableSnapshots( // don't time out on this request to not produce failed SLM runs in case of a temporarily slow master node .setMasterNodeTimeout(TimeValue.MAX_VALUE) .setIgnoreUnavailable(true) - .setPolicies(policies.toArray(Strings.EMPTY_ARRAY)) + .setPolicies(policies.keySet().toArray(Strings.EMPTY_ARRAY)) .setIncludeIndexNames(false) .execute(ActionListener.wrap(resp -> { if (logger.isTraceEnabled()) { @@ -300,7 +272,39 @@ void getAllRetainableSnapshots( logger.debug(() -> "unable to retrieve snapshots for [" + repo + "] repositories: ", resp.getFailures().get(repo)); } } - listener.onResponse(snapshots); + + if (logger.isTraceEnabled()) { + logger.trace( "retrieved snapshots: [{}]", snapshots.entrySet() .stream() .map( e -> e.getKey() + ": [" + e.getValue().stream().map(si -> si.snapshotId().getName()).collect(Collectors.joining(",")) + "]" ) .collect(Collectors.joining(",")) ); + } + + // Find all the snapshots that are past their retention date + final Map<String, List<Tuple<SnapshotId, String>>> snapshotsToBeDeleted = snapshots.entrySet() + .stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> e.getValue() + .stream() + .filter(snapshot -> snapshotEligibleForDeletion(snapshot, snapshots, policies)) + // SnapshotInfo instances can be quite large in case they contain e.g. 
a large collection of + // exceptions so we extract the only two things (id + policy id) here so they can be GCed + .map(snapshotInfo -> Tuple.tuple(snapshotInfo.snapshotId(), getPolicyId(snapshotInfo))) + .toList() + ) + ); + + listener.onResponse(snapshotsToBeDeleted); }, e -> { logger.debug(() -> "unable to retrieve snapshots for [" + repositories + "] repositories: ", e); listener.onFailure(e); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java index 15badabf3689a..b120b49c63654 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; @@ -42,7 +43,6 @@ import org.elasticsearch.xpack.core.slm.SnapshotRetentionConfiguration; import org.elasticsearch.xpack.slm.history.SnapshotHistoryStore; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -238,20 +238,6 @@ private void retentionTaskTest(final boolean deletionSuccess) throws Exception { 0L, Collections.emptyMap() ); - final SnapshotInfo ineligibleSnapshot = new SnapshotInfo( new Snapshot(repoId, new SnapshotId("name2", "uuid2")), Collections.singletonList("index"), Collections.emptyList(), Collections.emptyList(), null, System.currentTimeMillis() + 1, 1, Collections.emptyList(), true, Collections.singletonMap("policy", policyId), System.currentTimeMillis(), Collections.emptyMap() ); Set<SnapshotId> deleted = ConcurrentHashMap.newKeySet(); Set<String> deletedSnapshotsInHistory = ConcurrentHashMap.newKeySet(); @@ -273,11 +259,9 @@ private void retentionTaskTest(final boolean deletionSuccess) throws Exception { historyLatch.countDown(); }), () -> { - List<SnapshotInfo> snaps = new ArrayList<>(2); - snaps.add(eligibleSnapshot); - snaps.add(ineligibleSnapshot); - logger.info("--> retrieving snapshots [{}]", snaps); - return Collections.singletonMap(repoId, snaps); + final var result = Collections.singletonMap(repoId, List.of(Tuple.tuple(eligibleSnapshot.snapshotId(), policyId))); + logger.info("--> retrieving snapshots [{}]", result); + return result; }, (deletionPolicyId, repo, snapId, slmStats, listener) -> { logger.info("--> deleting {} from repo {}", snapId, repo); @@ -295,7 +279,7 @@ private void retentionTaskTest(final boolean deletionSuccess) throws Exception { long time = System.currentTimeMillis(); retentionTask.triggered(new SchedulerEngine.Event(SnapshotRetentionService.SLM_RETENTION_JOB_ID, time, time)); - deletionLatch.await(10, TimeUnit.SECONDS); + safeAwait(deletionLatch); assertThat("something should have been deleted", deleted, not(empty())); assertThat("one snapshot should have been deleted", deleted, hasSize(1)); @@ -364,18 +348,22 @@ protected void ); AtomicReference<Exception> errHandlerCalled = new AtomicReference<>(null); - task.getAllRetainableSnapshots(Collections.singleton(repoId), Collections.singleton(policyId), new ActionListener<>() { - @Override - public void onResponse(Map<String, List<SnapshotInfo>> stringListMap) { - logger.info("--> forcing failure"); - throw new 
ElasticsearchException("forced failure"); - } + task.getSnapshotsEligibleForDeletion( + Collections.singleton(repoId), + Map.of(policyId, new SnapshotLifecyclePolicy(policyId, "test", "* * * * *", repoId, null, null)), + new ActionListener<>() { + @Override + public void onResponse(Map>> snapshotsToBeDeleted) { + logger.info("--> forcing failure"); + throw new ElasticsearchException("forced failure"); + } - @Override - public void onFailure(Exception e) { - errHandlerCalled.set(e); + @Override + public void onFailure(Exception e) { + errHandlerCalled.set(e); + } } - }); + ); assertNotNull(errHandlerCalled.get()); assertThat(errHandlerCalled.get().getMessage(), equalTo("forced failure")); @@ -597,14 +585,14 @@ public ClusterState createState(OperationMode mode, SnapshotLifecyclePolicy... p } private static class MockSnapshotRetentionTask extends SnapshotRetentionTask { - private final Supplier>> snapshotRetriever; + private final Supplier>>> snapshotRetriever; private final DeleteSnapshotMock deleteRunner; MockSnapshotRetentionTask( Client client, ClusterService clusterService, SnapshotHistoryStore historyStore, - Supplier>> snapshotRetriever, + Supplier>>> snapshotRetriever, DeleteSnapshotMock deleteRunner, LongSupplier nanoSupplier ) { @@ -614,10 +602,10 @@ private static class MockSnapshotRetentionTask extends SnapshotRetentionTask { } @Override - void getAllRetainableSnapshots( + void getSnapshotsEligibleForDeletion( Collection repositories, - Set policies, - ActionListener>> listener + Map policies, + ActionListener>>> listener ) { listener.onResponse(this.snapshotRetriever.get()); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml index 417c52e391b7d..b38c6857108cc 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml @@ -1214,3 +1214,30 @@ setup: ml.get_trained_models: model_id: a-regression-model-0 include: definition_status + +--- +"Test include model platform architecture": + - do: + ml.put_trained_model: + model_id: model-without-definition + body: > + { + "model_type": "pytorch", + "inference_config": { + "ner": { + } + }, + "platform_architecture": "windows-x86_64" + } + + - do: + ml.get_trained_models: + model_id: model-without-definition + include: definition_status + - match: { count: 1 } + - match: { trained_model_configs.0.fully_defined: false } + - do: + ml.get_trained_models: + model_id: model-without-definition + - match: { count: 1 } + - match: { trained_model_configs.0.platform_architecture: windows-x86_64 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml new file mode 100644 index 0000000000000..28a6ad826bc64 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml @@ -0,0 +1,111 @@ +# This test uses the simple model defined in +# TextExpansionQueryIT.java to create the token weights. +setup: + - skip: + version: ' - 8.10.99' + reason: "sparse_vector field type reintroduced in 8.11" + features: headers + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser
+      indices.create:
+        index: index-with-rank-features
+        body:
+          mappings:
+            properties:
+              source_text:
+                type: keyword
+              ml.tokens:
+                type: rank_features
+
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      indices.create:
+        index: unrelated
+        body:
+          mappings:
+            properties:
+              source_text:
+                type: keyword
+
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      ml.put_trained_model:
+        model_id: "text_expansion_model"
+        body: >
+          {
+            "description": "simple model for testing",
+            "model_type": "pytorch",
+            "inference_config": {
+              "text_expansion": {
+                "tokenization": {
+                  "bert": {
+                    "with_special_tokens": false
+                  }
+                }
+              }
+            }
+          }
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      ml.put_trained_model_vocabulary:
+        model_id: "text_expansion_model"
+        body: >
+          { "vocabulary": ["[PAD]", "[UNK]", "these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"] }
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      ml.put_trained_model_definition_part:
+        model_id: "text_expansion_model"
+        part: 0
+        body: >
+          {
+            "total_definition_length":2078,
+            "definition": "UEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAUAA4Ac2ltcGxlbW9kZWwvZGF0YS5wa2xGQgoAWlpaWlpaWlpaWoACY19fdG9yY2hfXwpUaW55VGV4dEV4cGFuc2lvbgpxACmBfShYCAAAAHRyYWluaW5ncQGJWBYAAABfaXNfZnVsbF9iYWNrd2FyZF9ob29rcQJOdWJxAy5QSwcIITmbsFgAAABYAAAAUEsDBBQACAgIAAAAAAAAAAAAAAAAAAAAAAAdAB0Ac2ltcGxlbW9kZWwvY29kZS9fX3RvcmNoX18ucHlGQhkAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWoWRT4+cMAzF7/spfASJomF3e0Ga3nrrn8vcELIyxAzRhAQlpjvbT19DWDrdquqBA/bvPT87nVUxwsm41xPd+PNtUi4a77KvXs+W8voBAHFSQY3EFCIiHKFp1+p57vs/ShyUccZdoIaz93aBTMR+thbPqru+qKBx8P4q/e8TyxRlmwVctJp66H1YmCyS7WsZwD50A2L5V7pCBADGTTOj0bGGE7noQyqzv5JDfp0o9fZRCWqP37yjhE4+mqX5X3AdFZHGM/2TzOHDpy1IvQWR+OWo3KwsRiKdpcqg4pBFDtm+QJ7nqwIPckrlnGfFJG0uNhOl38Sjut3pCqg26QuZy8BR9In7ScHHrKkKMW0TIucFrGQXCMpdaDO05O6DpOiy8e4kr0Ed/2YKOIhplW8gPr4ntygrd9ixpx3j9UZZVRagl2c6+imWUzBjuf5m+Ch7afphuvvW+r/0dsfn+2N9MZGb9+/SFtCYdhd83CMYp+mGy0LiKNs8y/eUuEA8B/d2z4dfUEsHCFSE3IaCAQAAIAMAAFBLAwQUAAgICAAAAAAAAAAAAAAAAAAAAAAAJwApAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYnVnX3BrbEZCJQBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpahZHLbtNAFIZtp03rSVIuLRKXjdk5ojitKJsiFq24lem0KKSqpRIZt55gE9/GM+lNLFgx4i1Ys2aHhIBXgAVICNggHgNm6rqJN2BZGv36/v/MOWeea/Z5RVHurLfRUsfZXOnccx522itrd53O0vLqbaKYtsAKUe1pcege7hm9JNtzM8+kOOzNApIX0A3xBXE6YE7g0UWjg2OaZAJXbKvALOnj2GEHKc496ykLktgNt3Jz17hprCUxFqExe7YIpQkNpO1/kfHhPUdtUAdH2/gfmeYiIFW7IkM6IBP2wrDNbMe3Mjf2ksiK3Hjghg7F2DN9l/omZZl5Mmez2QRk0q4WUUB0+1oh9nDwxGdUXJdXPMRZQs352eGaRPV9s2lcMeZFGWBfKJJiw0YgbCMLBaRmXyy4flx6a667Fch55q05QOq2Jg2ANOyZwplhNsjiohVApo7aa21QnNGW5+4GXv8gxK1beBeHSRrhmLXWVh+0aBhErZ7bx1ejxMOhlR6QU4ycNqGyk8/yNGCWkwY7/RCD7UEQek4QszCgDJAzZtfErA0VqHBy9ugQP9pUfUmgCjVYgWNwHFbhBJyEOgSwBuuwARWZmoI6J9PwLfzEocpRpPrT8DP8wqHG0b4UX+E3DiscvRglXIoi81KKPwioHI5x9EooNKWiy0KOc/T6WF4SssrRuzJ9L2VNRXUhJzj6UKYfS4W/q/5wuh/l4M9R9qsU+y2dpoo2hJzkaEET8r6KRONicnRdK9EbUi6raFVIwNGjsrlbpk6ZPi7TbS3fv3LyNjPiEKzG0aG0tvNb6xw90/whe6ONjnJcUxobHDUqQ8bIOW79BVBLBwhfSmPKdAIAAE4EAABQSwMEAAAICAAAAAAAAAAAAAAAAAAAAAAAABkABQBzaW1wbGVtb2RlbC9jb25zdGFudHMucGtsRkIBAFqAAikuUEsHCG0vCVcEAAAABAAAAFBLAwQAAAgIAAAAAAAAAAAAAAAAAAAAAAAAEwA7AHNpbXBsZW1vZGVsL3ZlcnNpb25GQjcAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWjMKUEsHCNGeZ1UCAAAAAgAAAFBLAQIAAAAACAgAAAAAAAAhOZuwWAAAAFgAAAAUAAAAAAAAAAAAAAAAAAAAAABzaW1wbGVtb2RlbC9kYXRhLnBrbFBLAQIAABQACAgIAAAAAABUhNyGggEAACADAAAdAAAAAAAAAAAAAAAAAKgAAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weVBLAQIAABQACAgIAAAAAABfSmPKdAIAAE4EAAAnAAAAAAAAAAAAAAAAAJICAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weS5kZWJ1Z19wa2xQSwECAAAAAAgIAAAAAAAAbS8JVwQAAAAEAAAAGQAAAAAAAAAAAAAAAACEBQAAc2ltcGxlbW9kZWwvY29uc3RhbnRzLnBrbFBLAQIAAAAACAgAAAAAAADRnmdVAgAAAAIAAAATAAAAAAAAAAAAAAAAANQFAABzaW1wbGVtb2RlbC92ZXJzaW9uUEsGBiwAAAAAAAAAHgMtAAAAAAAAAAAABQAAAAAAAAAFAAAAAAAAAGoBAAAAAAAAUgYAAAAAAABQSwYHAAAAALwHAAAAAAAAAQAAAFBLBQYAAAAABQAFAGoBAABSBgAAAAA=",
+            "total_parts": 1
+          }
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+        Content-Type: application/json
+      bulk:
+        index: index-with-rank-features
+        refresh: true
+        body: |
+          {"index": {}}
+          {"source_text": "my words comforter", "ml.tokens":{"my":1.0, "words":1.0,"comforter":1.0}}
+          {"index": {}}
+          {"source_text": "the machine is leaking", "ml.tokens":{"the":1.0,"machine":1.0,"is":1.0,"leaking":1.0}}
+          {"index": {}}
+          {"source_text": "these are my words", "ml.tokens":{"these":1.0,"are":1.0,"my":1.0,"words":1.0}}
+          {"index": {}}
+          {"source_text": "the octopus comforter smells", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"smells":1.0}}
+          {"index": {}}
+          {"source_text": "the octopus comforter is leaking", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"is":1.0,"leaking":1.0}}
+          {"index": {}}
+          {"source_text": "washing machine smells", "ml.tokens":{"washing":1.0,"machine":1.0,"smells":1.0}}
+
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+        Content-Type: application/json
+      ml.start_trained_model_deployment:
+        model_id: text_expansion_model
+        wait_for: started
+
+---
+"Test text expansion search":
+  - do:
+      search:
+        index: index-with-rank-features
+        body:
+          query:
+            text_expansion:
+              ml.tokens:
+                model_id: text_expansion_model
+                model_text: "octopus comforter smells"
+  - match: { hits.total.value: 4 }
+  - match: { hits.hits.0._source.source_text: "the octopus comforter smells" }
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml
new file mode 100644
index 0000000000000..5a31af18f8269
--- /dev/null
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml
@@ -0,0 +1,111 @@
+# This test uses the simple model defined in
+# TextExpansionQueryIT.java to create the token weights.
+setup:
+  - skip:
+      features: headers
+      version: ' - 8.7.99'
+      reason: "text_expansion query introduced in 8.8"
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      indices.create:
+        index: index-with-rank-features
+        body:
+          mappings:
+            properties:
+              source_text:
+                type: keyword
+              ml.tokens:
+                type: sparse_vector
+
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      indices.create:
+        index: unrelated
+        body:
+          mappings:
+            properties:
+              source_text:
+                type: keyword
+
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      ml.put_trained_model:
+        model_id: "text_expansion_model"
+        body: >
+          {
+            "description": "simple model for testing",
+            "model_type": "pytorch",
+            "inference_config": {
+              "text_expansion": {
+                "tokenization": {
+                  "bert": {
+                    "with_special_tokens": false
+                  }
+                }
+              }
+            }
+          }
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      ml.put_trained_model_vocabulary:
+        model_id: "text_expansion_model"
+        body: >
+          { "vocabulary": ["[PAD]", "[UNK]", "these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"] }
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      ml.put_trained_model_definition_part:
+        model_id: "text_expansion_model"
+        part: 0
+        body: >
+          {
+            "total_definition_length":2078,
+            "definition": "UEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAUAA4Ac2ltcGxlbW9kZWwvZGF0YS5wa2xGQgoAWlpaWlpaWlpaWoACY19fdG9yY2hfXwpUaW55VGV4dEV4cGFuc2lvbgpxACmBfShYCAAAAHRyYWluaW5ncQGJWBYAAABfaXNfZnVsbF9iYWNrd2FyZF9ob29rcQJOdWJxAy5QSwcIITmbsFgAAABYAAAAUEsDBBQACAgIAAAAAAAAAAAAAAAAAAAAAAAdAB0Ac2ltcGxlbW9kZWwvY29kZS9fX3RvcmNoX18ucHlGQhkAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWoWRT4+cMAzF7/spfASJomF3e0Ga3nrrn8vcELIyxAzRhAQlpjvbT19DWDrdquqBA/bvPT87nVUxwsm41xPd+PNtUi4a77KvXs+W8voBAHFSQY3EFCIiHKFp1+p57vs/ShyUccZdoIaz93aBTMR+thbPqru+qKBx8P4q/e8TyxRlmwVctJp66H1YmCyS7WsZwD50A2L5V7pCBADGTTOj0bGGE7noQyqzv5JDfp0o9fZRCWqP37yjhE4+mqX5X3AdFZHGM/2TzOHDpy1IvQWR+OWo3KwsRiKdpcqg4pBFDtm+QJ7nqwIPckrlnGfFJG0uNhOl38Sjut3pCqg26QuZy8BR9In7ScHHrKkKMW0TIucFrGQXCMpdaDO05O6DpOiy8e4kr0Ed/2YKOIhplW8gPr4ntygrd9ixpx3j9UZZVRagl2c6+imWUzBjuf5m+Ch7afphuvvW+r/0dsfn+2N9MZGb9+/SFtCYdhd83CMYp+mGy0LiKNs8y/eUuEA8B/d2z4dfUEsHCFSE3IaCAQAAIAMAAFBLAwQUAAgICAAAAAAAAAAAAAAAAAAAAAAAJwApAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYnVnX3BrbEZCJQBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpahZHLbtNAFIZtp03rSVIuLRKXjdk5ojitKJsiFq24lem0KKSqpRIZt55gE9/GM+lNLFgx4i1Ys2aHhIBXgAVICNggHgNm6rqJN2BZGv36/v/MOWeea/Z5RVHurLfRUsfZXOnccx522itrd53O0vLqbaKYtsAKUe1pcege7hm9JNtzM8+kOOzNApIX0A3xBXE6YE7g0UWjg2OaZAJXbKvALOnj2GEHKc496ykLktgNt3Jz17hprCUxFqExe7YIpQkNpO1/kfHhPUdtUAdH2/gfmeYiIFW7IkM6IBP2wrDNbMe3Mjf2ksiK3Hjghg7F2DN9l/omZZl5Mmez2QRk0q4WUUB0+1oh9nDwxGdUXJdXPMRZQs352eGaRPV9s2lcMeZFGWBfKJJiw0YgbCMLBaRmXyy4flx6a667Fch55q05QOq2Jg2ANOyZwplhNsjiohVApo7aa21QnNGW5+4GXv8gxK1beBeHSRrhmLXWVh+0aBhErZ7bx1ejxMOhlR6QU4ycNqGyk8/yNGCWkwY7/RCD7UEQek4QszCgDJAzZtfErA0VqHBy9ugQP9pUfUmgCjVYgWNwHFbhBJyEOgSwBuuwARWZmoI6J9PwLfzEocpRpPrT8DP8wqHG0b4UX+E3DiscvRglXIoi81KKPwioHI5x9EooNKWiy0KOc/T6WF4SssrRuzJ9L2VNRXUhJzj6UKYfS4W/q/5wuh/l4M9R9qsU+y2dpoo2hJzkaEET8r6KRONicnRdK9EbUi6raFVIwNGjsrlbpk6ZPi7TbS3fv3LyNjPiEKzG0aG0tvNb6xw90/whe6ONjnJcUxobHDUqQ8bIOW79BVBLBwhfSmPKdAIAAE4EAABQSwMEAAAICAAAAAAAAAAAAAAAAAAAAAAAABkABQBzaW1wbGVtb2RlbC9jb25zdGFudHMucGtsRkIBAFqAAikuUEsHCG0vCVcEAAAABAAAAFBLAwQAAAgIAAAAAAAAAAAAAAAAAAAAAAAAEwA7AHNpbXBsZW1vZGVsL3ZlcnNpb25GQjcAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWjMKUEsHCNGeZ1UCAAAAAgAAAFBLAQIAAAAACAgAAAAAAAAhOZuwWAAAAFgAAAAUAAAAAAAAAAAAAAAAAAAAAABzaW1wbGVtb2RlbC9kYXRhLnBrbFBLAQIAABQACAgIAAAAAABUhNyGggEAACADAAAdAAAAAAAAAAAAAAAAAKgAAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weVBLAQIAABQACAgIAAAAAABfSmPKdAIAAE4EAAAnAAAAAAAAAAAAAAAAAJICAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weS5kZWJ1Z19wa2xQSwECAAAAAAgIAAAAAAAAbS8JVwQAAAAEAAAAGQAAAAAAAAAAAAAAAACEBQAAc2ltcGxlbW9kZWwvY29uc3RhbnRzLnBrbFBLAQIAAAAACAgAAAAAAADRnmdVAgAAAAIAAAATAAAAAAAAAAAAAAAAANQFAABzaW1wbGVtb2RlbC92ZXJzaW9uUEsGBiwAAAAAAAAAHgMtAAAAAAAAAAAABQAAAAAAAAAFAAAAAAAAAGoBAAAAAAAAUgYAAAAAAABQSwYHAAAAALwHAAAAAAAAAQAAAFBLBQYAAAAABQAFAGoBAABSBgAAAAA=",
+            "total_parts": 1
+          }
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+        Content-Type: application/json
+      bulk:
+        index: index-with-rank-features
+        refresh: true
+        body: |
+          {"index": {}}
+          {"source_text": "my words comforter", "ml.tokens":{"my":1.0, "words":1.0,"comforter":1.0}}
+          {"index": {}}
+          {"source_text": "the machine is leaking", "ml.tokens":{"the":1.0,"machine":1.0,"is":1.0,"leaking":1.0}}
+          {"index": {}}
+          {"source_text": "these are my words", "ml.tokens":{"these":1.0,"are":1.0,"my":1.0,"words":1.0}}
+          {"index": {}}
+          {"source_text": "the octopus comforter smells", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"smells":1.0}}
+          {"index": {}}
+          {"source_text": "the octopus comforter is leaking", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"is":1.0,"leaking":1.0}}
+          {"index": {}}
+          {"source_text": "washing machine smells", "ml.tokens":{"washing":1.0,"machine":1.0,"smells":1.0}}
+
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+        Content-Type: application/json
+      ml.start_trained_model_deployment:
+        model_id: text_expansion_model
+        wait_for: started
+
+---
+"Test text expansion search":
+  - do:
+      search:
+        index: index-with-rank-features
+        body:
+          query:
+            text_expansion:
+              ml.tokens:
+                model_id: text_expansion_model
+                model_text: "octopus comforter smells"
+  - match: { hits.total.value: 4 }
+  - match: { hits.hits.0._source.source_text: "the octopus comforter smells" }