diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 1ddb3e82920cd..37ea49e3a6d95 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 03368e7e4a9c0..8819a5f7f493f 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -272,8 +272,8 @@ steps: env: BWC_VERSION: 8.14.3 - - label: "{{matrix.image}} / 8.15.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.3 + - label: "{{matrix.image}} / 8.15.4 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.4 timeout_in_minutes: 300 matrix: setup: @@ -286,7 +286,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.15.3 + BWC_VERSION: 8.15.4 - label: "{{matrix.image}} / 8.16.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index d572dd104d215..7b6a6ea72fe83 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -287,8 +287,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.15.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.3#bwcTest + - label: 8.15.4 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.4#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -297,7 +297,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.15.3 + BWC_VERSION: 8.15.4 retry: automatic: - exit_status: "-1" @@ -429,7 +429,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -471,7 +471,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index ecfb8088072a0..81b8225e443a4 100755 --- a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -22,6 +22,7 @@ if [[ "$BRANCH" == "main" ]]; then fi ES_VERSION=$(grep elasticsearch build-tools-internal/version.properties | sed "s/elasticsearch *= *//g") +echo "ES_VERSION=$ES_VERSION" VERSION_SUFFIX="" if [[ "$WORKFLOW" == "snapshot" ]]; then @@ -29,7 +30,10 @@ if [[ "$WORKFLOW" == "snapshot" ]]; then fi BEATS_BUILD_ID="$(./.ci/scripts/resolve-dra-manifest.sh beats "$RM_BRANCH" "$ES_VERSION" "$WORKFLOW")" +echo "BEATS_BUILD_ID=$BEATS_BUILD_ID" + ML_CPP_BUILD_ID="$(./.ci/scripts/resolve-dra-manifest.sh ml-cpp "$RM_BRANCH" "$ES_VERSION" "$WORKFLOW")" +echo "ML_CPP_BUILD_ID=$ML_CPP_BUILD_ID" LICENSE_KEY_ARG="" BUILD_SNAPSHOT_ARG="" diff --git a/.ci/bwcVersions b/.ci/bwcVersions index cd1f7d1ae269f..2e77631450825 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions 
@@ -14,7 +14,7 @@ BWC_VERSION: - "8.12.2" - "8.13.4" - "8.14.3" - - "8.15.3" + - "8.15.4" - "8.16.0" - "8.17.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 67ebf0c51ab1f..c6edc709a8ceb 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - - "8.15.3" + - "8.15.4" - "8.16.0" - "8.17.0" - "9.0.0" diff --git a/docs/changelog/111684.yaml b/docs/changelog/111684.yaml deleted file mode 100644 index 32edb5723cb0a..0000000000000 --- a/docs/changelog/111684.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 111684 -summary: Write downloaded model parts async -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/112761.yaml b/docs/changelog/112761.yaml deleted file mode 100644 index fe63f38f365a4..0000000000000 --- a/docs/changelog/112761.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 112761 -summary: Fix collapse interaction with stored fields -area: Search -type: bug -issues: - - 112646 diff --git a/docs/changelog/112881.yaml b/docs/changelog/112881.yaml new file mode 100644 index 0000000000000..a8a0d542f8201 --- /dev/null +++ b/docs/changelog/112881.yaml @@ -0,0 +1,5 @@ +pr: 112881 +summary: "ESQL: Remove parent from `FieldAttribute`" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/113123.yaml b/docs/changelog/113123.yaml deleted file mode 100644 index 43008eaa80f43..0000000000000 --- a/docs/changelog/113123.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113123 -summary: "ES|QL: Skip CASE function from `InferIsNotNull` rule checks" -area: ES|QL -type: bug -issues: - - 112704 diff --git a/docs/changelog/113129.yaml b/docs/changelog/113129.yaml deleted file mode 100644 index d88d86387ac10..0000000000000 --- a/docs/changelog/113129.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113129 -summary: Fix `needsScore` computation in `GlobalOrdCardinalityAggregator` -area: Aggregations -type: bug -issues: - - 112975 diff --git a/docs/changelog/113266.yaml b/docs/changelog/113266.yaml deleted file mode 100644 index d423387d45738..0000000000000 --- a/docs/changelog/113266.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113266 -summary: "[M] Fix error message formatting" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/113437.yaml b/docs/changelog/113437.yaml deleted file mode 100644 index 98831958e63f8..0000000000000 --- a/docs/changelog/113437.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113437 -summary: Fix check on E5 model platform compatibility -area: Machine Learning -type: bug -issues: - - 113577 diff --git a/docs/changelog/113697.yaml b/docs/changelog/113697.yaml deleted file mode 100644 index 1362e01fcc89b..0000000000000 --- a/docs/changelog/113697.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113697 -summary: Handle parsing ingest processors where definition is not a object -area: Machine Learning -type: bug -issues: - - 113615 diff --git a/docs/changelog/113699.yaml b/docs/changelog/113699.yaml deleted file mode 100644 index 3876c8147e7eb..0000000000000 --- a/docs/changelog/113699.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113699 -summary: "[ESQL] Fix init value in max float aggregation" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/113846.yaml b/docs/changelog/113846.yaml deleted file mode 100644 index 5fdd56e98d706..0000000000000 --- a/docs/changelog/113846.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113846 -summary: Don't validate internal stats if they are empty -area: Aggregations -type: bug -issues: - - 113811 diff --git a/docs/changelog/113869.yaml b/docs/changelog/113869.yaml 
deleted file mode 100644 index f1cd1ec423966..0000000000000 --- a/docs/changelog/113869.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113869 -summary: Upgrade protobufer to 3.25.5 -area: Snapshot/Restore -type: upgrade -issues: [] diff --git a/docs/changelog/113961.yaml b/docs/changelog/113961.yaml deleted file mode 100644 index 24cb1f45f029e..0000000000000 --- a/docs/changelog/113961.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113961 -summary: "[ESQL] Support datetime data type in Least and Greatest functions" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/114116.yaml b/docs/changelog/114116.yaml deleted file mode 100644 index 8d1c9e162ae23..0000000000000 --- a/docs/changelog/114116.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114116 -summary: "ES|QL: Ensure minimum capacity for `PlanStreamInput` caches" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/114264.yaml b/docs/changelog/114264.yaml deleted file mode 100644 index fe421f6422830..0000000000000 --- a/docs/changelog/114264.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114264 -summary: "Fix analyzed wildcard query in simple_query_string when disjunctions is empty" -area: Search -type: bug -issues: [114185] diff --git a/docs/changelog/114337.yaml b/docs/changelog/114337.yaml deleted file mode 100644 index ec55be8bb179b..0000000000000 --- a/docs/changelog/114337.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114337 -summary: "Enables cluster state role mapper, to include ECK operator-defined role mappings in role resolution" -area: Authentication -type: bug -issues: [] diff --git a/docs/reference/connector/docs/connectors-content-extraction.asciidoc b/docs/reference/connector/docs/connectors-content-extraction.asciidoc index b785d62f0f553..5d2a9550a7c3c 100644 --- a/docs/reference/connector/docs/connectors-content-extraction.asciidoc +++ b/docs/reference/connector/docs/connectors-content-extraction.asciidoc @@ -90,7 +90,7 @@ include::_connectors-list-local-content-extraction.asciidoc[] Self-hosted content extraction is handled by a *separate* extraction service. The versions for the extraction service do not align with the Elastic stack. -For version `8.11.x`, you should use extraction service version `0.3.x`. +For versions after `8.11.x` (including {version}), you should use extraction service version `0.3.x`. You can run the service with the following command: diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 8fdf8aecc2ae5..5cb03d950f68c 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -21,6 +21,7 @@ the following APIs to manage {infer} models and perform {infer}: * <> * <> * <> +* <> [[inference-landscape]] .A representation of the Elastic inference landscape @@ -39,6 +40,7 @@ include::delete-inference.asciidoc[] include::get-inference.asciidoc[] include::post-inference.asciidoc[] include::put-inference.asciidoc[] +include::update-inference.asciidoc[] include::service-alibabacloud-ai-search.asciidoc[] include::service-amazon-bedrock.asciidoc[] include::service-anthropic.asciidoc[] diff --git a/docs/reference/inference/update-inference.asciidoc b/docs/reference/inference/update-inference.asciidoc new file mode 100644 index 0000000000000..166b002ea45f5 --- /dev/null +++ b/docs/reference/inference/update-inference.asciidoc @@ -0,0 +1,87 @@ +[role="xpack"] +[[update-inference-api]] +=== Update inference API + +experimental[] + +Updates an {infer} endpoint. 
+ +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. + + +[discrete] +[[update-inference-api-request]] +==== {api-request-title} + +`POST _inference/<inference_id>/_update` + +`POST _inference/<task_type>/<inference_id>/_update` + + +[discrete] +[[update-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage_inference` <> (the built-in `inference_admin` role grants this privilege) +* Requires an existing {infer} endpoint, created by using the <> + + +[discrete] +[[update-inference-api-desc]] +==== {api-description-title} + +The update inference API enables you to update the `task_settings`, secrets, and/or `num_allocations` of an existing {infer} endpoint. + +To use the update API, you can modify `task_settings`, secrets (within `service_settings`), or `num_allocations`, depending on the specific endpoint service and `task_type` you've created. +To view the updatable `task_settings`, the field names of secrets (specific to each service), and the services where `num_allocations` is applicable (only for the `elasticsearch` service), refer to the following list of services available through the {infer} API. +You will find the available task types next to each service name. +Click the links to review the service configuration details: + +* <> (`completion`, `rerank`, `sparse_embedding`, `text_embedding`) +* <> (`completion`, `text_embedding`) +* <> (`completion`) +* <> (`completion`, `text_embedding`) +* <> (`completion`, `text_embedding`) +* <> (`completion`, `rerank`, `text_embedding`) +* <> (`rerank`, `sparse_embedding`, `text_embedding` - this service is for built-in models and models uploaded through Eland) +* <> (`sparse_embedding`) +* <> (`completion`, `text_embedding`) +* <> (`rerank`, `text_embedding`) +* <> (`text_embedding`) +* <> (`text_embedding`) +* <> (`completion`, `text_embedding`) + + +[discrete] +[[update-inference-api-path-params]] +==== {api-path-parms-title} + +`<inference_id>`:: +(Required, string) +The unique identifier of the {infer} endpoint. + + +`<task_type>`:: +(Optional, string) +The type of {infer} task that the model performs. +Refer to the service list in the <> for the available task types. + + +[discrete] +[[update-inference-api-example]] +==== {api-examples-title} + +The following example shows how to update an API key of an {infer} endpoint called `my-inference-endpoint`: + +[source,console] +------------------------------------------------------------ +POST _inference/my-inference-endpoint/_update +{ + "service_settings": { + "api_key": "<API_KEY>" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index ef19fbf4e267d..d01047eac9815 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -3,7 +3,6 @@ Adaptive allocations configuration object. If enabled, the number of allocations of the model is set based on the current load the process gets. When the load is high, a new model allocation is automatically created (respecting the value of `max_number_of_allocations` if it's set).
When the load is low, a model allocation is automatically removed (respecting the value of `min_number_of_allocations` if it's set). -The number of model allocations cannot be scaled down to less than `1` this way. If `adaptive_allocations` is enabled, do not set the number of allocations manually. end::adaptive-allocation[] diff --git a/muted-tests.yml b/muted-tests.yml index 2e623fa94e06a..78f8b76aaff64 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -306,57 +306,18 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/usage/line_38} issue: https://github.com/elastic/elasticsearch/issues/113694 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/10_basic/Test using the deprecated elasticsearch_version field results in a warning} - issue: https://github.com/elastic/elasticsearch/issues/114748 - class: org.elasticsearch.xpack.eql.EqlRestIT method: testIndexWildcardPatterns issue: https://github.com/elastic/elasticsearch/issues/114749 -- class: org.elasticsearch.xpack.eql.EqlRestIT - method: testBadRequests - issue: https://github.com/elastic/elasticsearch/issues/114752 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/20_standard_index/enrich stats REST response structure} - issue: https://github.com/elastic/elasticsearch/issues/114753 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/30_tsdb_index/enrich documents over _bulk} - issue: https://github.com/elastic/elasticsearch/issues/114761 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/20_standard_index/enrich documents over _bulk via an alias} - issue: https://github.com/elastic/elasticsearch/issues/114763 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/10_basic/Test enrich crud apis} - issue: https://github.com/elastic/elasticsearch/issues/114766 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/20_standard_index/enrich documents over _bulk} - issue: https://github.com/elastic/elasticsearch/issues/114768 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/50_data_stream/enrich documents over _bulk via a data stream} - issue: https://github.com/elastic/elasticsearch/issues/114769 -- class: org.elasticsearch.xpack.eql.EqlRestValidationIT - method: testDefaultIndicesOptions - issue: https://github.com/elastic/elasticsearch/issues/114771 - class: org.elasticsearch.xpack.enrich.EnrichIT method: testEnrichSpecialTypes issue: https://github.com/elastic/elasticsearch/issues/114773 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT method: testEveryActionIsEitherOperatorOnlyOrNonOperator issue: https://github.com/elastic/elasticsearch/issues/102992 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: testDeleteExistingPipeline - issue: https://github.com/elastic/elasticsearch/issues/114775 - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testNoStream issue: https://github.com/elastic/elasticsearch/issues/114788 -- class: org.elasticsearch.xpack.eql.EqlRestValidationIT - method: testAllowNoIndicesOption - issue: https://github.com/elastic/elasticsearch/issues/114789 -- class: org.elasticsearch.xpack.eql.EqlStatsIT - method: testEqlRestUsage - issue: https://github.com/elastic/elasticsearch/issues/114790 -- class: org.elasticsearch.xpack.eql.EqlRestIT - method: testUnicodeChars - issue: 
https://github.com/elastic/elasticsearch/issues/114791 - class: org.elasticsearch.ingest.geoip.HttpClientTests issue: https://github.com/elastic/elasticsearch/issues/112618 - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT @@ -382,6 +343,15 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 +- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT + method: testUpgradeMovesRepoToNewMetaVersion + issue: https://github.com/elastic/elasticsearch/issues/114994 +- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT + method: testReadOnlyRepo + issue: https://github.com/elastic/elasticsearch/issues/114997 +- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT + method: testCreateAndRestoreSnapshot + issue: https://github.com/elastic/elasticsearch/issues/114998 # Examples: # diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index dcf6f7aebdc65..d85990b4ede8c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -175,6 +175,8 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); + public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); + public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 48bf08ddfc028..5e4df05c10182 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -186,6 +186,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_1 = new Version(8_15_01_99); public static final Version V_8_15_2 = new Version(8_15_02_99); public static final Version V_8_15_3 = new Version(8_15_03_99); + public static final Version V_8_15_4 = new Version(8_15_04_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_17_0 = new Version(8_17_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 5aec2bcd04b26..2e1d58e042f09 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.Rewriteable; @@ -254,8 +255,10 @@ public SearchRequest(StreamInput in) throws IOException { finalReduce = true; } ccsMinimizeRoundtrips = in.readBoolean(); - if (in.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) && in.readBoolean()) { - Version.readVersion(in); // and drop on the floor + if ((in.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) + || in.getTransportVersion().onOrAfter(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE)) && in.readBoolean()) { + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed (again) when the v9 transport version can diverge + Version v = Version.readVersion(in); // and drop on the floor } waitForCheckpoints = in.readMap(StreamInput::readLongArray); waitForCheckpointsTimeout = in.readTimeValue(); @@ -291,7 +294,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(finalReduce); } out.writeBoolean(ccsMinimizeRoundtrips); - if (out.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE)) { + if (out.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) + || out.getTransportVersion().onOrAfter(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE)) { out.writeBoolean(false); } out.writeMap(waitForCheckpoints, StreamOutput::writeLongArray); diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 347b44a22e7c0..25e9c1e3701fb 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.node.Node; @@ -830,6 +831,7 @@ 
private void setRetentionLeaseMillis(final TimeValue retentionLease) { private volatile boolean skipIgnoredSourceRead; private volatile boolean syntheticSourceSecondDocParsingPassEnabled; private final SourceFieldMapper.Mode indexMappingSourceMode; + private final boolean recoverySourceEnabled; /** * The maximum number of refresh listeners allows on this shard. @@ -992,6 +994,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); syntheticSourceSecondDocParsingPassEnabled = scopedSettings.get(SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING); indexMappingSourceMode = scopedSettings.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); + recoverySourceEnabled = RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(nodeSettings); scopedSettings.addSettingsUpdateConsumer( MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, @@ -1687,6 +1690,14 @@ public SourceFieldMapper.Mode getIndexMappingSourceMode() { return indexMappingSourceMode; } + /** + * @return Whether recovery source should be enabled if needed. + * Note that this is a node setting, and this setting is not sourced from index settings. + */ + public boolean isRecoverySourceEnabled() { + return recoverySourceEnabled; + } + /** * The bounds for {@code @timestamp} on this index or * {@code null} if there are no bounds. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index ea1ffdb7c019f..dd09dc6ea0c5c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -39,8 +39,6 @@ import java.util.List; import java.util.Locale; -import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING; - public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); public static final NodeFeature SYNTHETIC_SOURCE_STORED_FIELDS_ADVANCE_FIX = new NodeFeature( @@ -84,8 +82,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - null, - true + null ); private static final SourceFieldMapper DEFAULT_DISABLED = new SourceFieldMapper( @@ -93,17 +90,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - null, - true - ); - - private static final SourceFieldMapper DEFAULT_DISABLED_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.DISABLED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null, - false + null ); private static final SourceFieldMapper DEFAULT_SYNTHETIC = new SourceFieldMapper( @@ -111,26 +98,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - null, - true - ); - - private static final SourceFieldMapper DEFAULT_SYNTHETIC_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null, - false - ); - - private static final SourceFieldMapper DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - null, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null, - false + null ); private static final SourceFieldMapper TSDB_DEFAULT = new SourceFieldMapper( @@ -138,8 +106,7 @@ public enum Mode { 
Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - true + IndexMode.TIME_SERIES ); private static final SourceFieldMapper TSDB_DEFAULT_STORED = new SourceFieldMapper( @@ -147,26 +114,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - true - ); - - private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - false - ); - - private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper( - Mode.STORED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - false + IndexMode.TIME_SERIES ); private static final SourceFieldMapper LOGSDB_DEFAULT = new SourceFieldMapper( @@ -174,8 +122,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - true + IndexMode.LOGSDB ); private static final SourceFieldMapper LOGSDB_DEFAULT_STORED = new SourceFieldMapper( @@ -183,26 +130,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - true - ); - - private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - false - ); - - private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper( - Mode.STORED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - false + IndexMode.LOGSDB ); /* @@ -214,17 +142,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - true - ); - - private static final SourceFieldMapper TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - null, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - false + IndexMode.TIME_SERIES ); public static class Defaults { @@ -285,20 +203,12 @@ public static class Builder extends MetadataFieldMapper.Builder { private final boolean supportsNonDefaultParameterValues; - private final boolean enableRecoverySource; - - public Builder( - IndexMode indexMode, - final Settings settings, - boolean supportsCheckForNonDefaultParams, - boolean enableRecoverySource - ) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); - this.enableRecoverySource = enableRecoverySource; } public Builder setSynthetic() { @@ -333,7 +243,7 @@ public SourceFieldMapper build() { ? INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings) : mode.get(); if (isDefault(sourceMode)) { - return resolveSourceMode(indexMode, sourceMode == null ? Mode.STORED : sourceMode, enableRecoverySource); + return resolveSourceMode(indexMode, sourceMode == null ? 
Mode.STORED : sourceMode); } if (supportsNonDefaultParameterValues == false) { @@ -364,8 +274,7 @@ public SourceFieldMapper build() { enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), excludes.getValue().toArray(Strings.EMPTY_ARRAY), - indexMode, - enableRecoverySource + indexMode ); if (indexMode != null) { indexMode.validateSourceFieldMapper(sourceFieldMapper); @@ -375,16 +284,16 @@ public SourceFieldMapper build() { } - private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode, boolean enableRecoverySource) { + private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode) { switch (indexMode) { case STANDARD: switch (sourceMode) { case SYNTHETIC: - return enableRecoverySource ? DEFAULT_SYNTHETIC : DEFAULT_SYNTHETIC_NO_RECOVERY_SOURCE; + return DEFAULT_SYNTHETIC; case STORED: - return enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE; + return DEFAULT; case DISABLED: - return enableRecoverySource ? DEFAULT_DISABLED : DEFAULT_DISABLED_NO_RECOVERY_SOURCE; + return DEFAULT_DISABLED; default: throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); } @@ -392,15 +301,9 @@ private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, fi case LOGSDB: switch (sourceMode) { case SYNTHETIC: - return enableRecoverySource - ? (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT) - : (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_NO_RECOVERY_SOURCE : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE); + return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT; case STORED: - return enableRecoverySource - ? (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED) - : (indexMode == IndexMode.TIME_SERIES - ? TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED - : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED); + return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED; case DISABLED: throw new IllegalArgumentException("_source can not be disabled in index using [" + indexMode + "] index mode"); default: @@ -413,21 +316,19 @@ private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, fi public static final TypeParser PARSER = new ConfigurableTypeParser(c -> { final IndexMode indexMode = c.getIndexSettings().getMode(); - boolean enableRecoverySource = INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(c.getSettings()); final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); if (indexMode.isSyntheticSourceEnabled()) { if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) { - return enableRecoverySource ? TSDB_LEGACY_DEFAULT : TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE; + return TSDB_LEGACY_DEFAULT; } } - return resolveSourceMode(indexMode, settingSourceMode == null ? Mode.STORED : settingSourceMode, enableRecoverySource); + return resolveSourceMode(indexMode, settingSourceMode == null ? 
Mode.STORED : settingSourceMode); }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), - c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), - INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(c.getSettings()) + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) ) ); @@ -480,16 +381,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final SourceFilter sourceFilter; private final IndexMode indexMode; - private final boolean enableRecoverySource; - - private SourceFieldMapper( - Mode mode, - Explicit enabled, - String[] includes, - String[] excludes, - IndexMode indexMode, - boolean enableRecoverySource - ) { + + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, IndexMode indexMode) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); assert enabled.explicit() == false || mode == null; this.mode = mode; @@ -502,7 +395,6 @@ private SourceFieldMapper( } this.complete = stored() && sourceFilter == null; this.indexMode = indexMode; - this.enableRecoverySource = enableRecoverySource; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -547,6 +439,7 @@ public void preParse(DocumentParserContext context) throws IOException { context.doc().add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length)); } + boolean enableRecoverySource = context.indexSettings().isRecoverySourceEnabled(); if (enableRecoverySource && originalSource != null && adaptedSource != originalSource) { // if we omitted source or modified it we add the _recovery_source to ensure we have it for ops based recovery BytesRef ref = originalSource.toBytesRef(); @@ -575,7 +468,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY, false, enableRecoverySource).init(this); + return new Builder(indexMode, Settings.EMPTY, false).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java index 89240a94f14ce..da16d8d79fef4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java @@ -21,8 +21,6 @@ import java.io.IOException; import java.util.Locale; -import static org.elasticsearch.common.geo.GeoUtils.normalizeLat; -import static org.elasticsearch.common.geo.GeoUtils.normalizeLon; import static org.elasticsearch.common.geo.GeoUtils.quantizeLat; /** @@ -113,15 +111,13 @@ public static int checkPrecisionRange(int precision) { * Calculates the x-coordinate in the tile grid for the specified longitude given * the number of tile columns for a pre-determined zoom-level. * - * @param longitude the longitude to use when determining the tile x-coordinate + * @param longitude the longitude to use when determining the tile x-coordinate. Longitude is in degrees + * and must be between -180 and 180 degrees. 
* @param tiles the number of tiles per row for a pre-determined zoom-level */ public static int getXTile(double longitude, int tiles) { - // normalizeLon treats this as 180, which is not friendly for tile mapping - if (longitude == -180) { - return 0; - } - final double xTile = (normalizeLon(longitude) + 180.0) / 360.0 * tiles; + assert longitude >= -180 && longitude <= 180 : "Longitude must be between -180 and 180 degrees"; + final double xTile = (longitude + 180.0) / 360.0 * tiles; // Edge values may generate invalid values, and need to be clipped. return Math.max(0, Math.min(tiles - 1, (int) Math.floor(xTile))); } @@ -130,11 +126,13 @@ public static int getXTile(double longitude, int tiles) { * Calculates the y-coordinate in the tile grid for the specified longitude given * the number of tile rows for pre-determined zoom-level. * - * @param latitude the latitude to use when determining the tile y-coordinate + * @param latitude the latitude to use when determining the tile y-coordinate. Latitude is in degrees + * and must be between -90 and 90 degrees. * @param tiles the number of tiles per column for a pre-determined zoom-level */ public static int getYTile(double latitude, int tiles) { - final double latSin = SloppyMath.cos(PI_DIV_2 - Math.toRadians(normalizeLat(latitude))); + assert latitude >= -90 && latitude <= 90 : "Latitude must be between -90 and 90 degrees"; + final double latSin = SloppyMath.cos(PI_DIV_2 - Math.toRadians(latitude)); final double yTile = (0.5 - (ESSloppyMath.log((1.0 + latSin) / (1.0 - latSin)) / PI_TIMES_4)) * tiles; // Edge values may generate invalid values, and need to be clipped. // For example, polar regions (above/below lat 85.05112878) get normalized. diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 44c752def351e..b0ef5b780e775 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -130,3 +130,4 @@ 8.15.0,8702002 8.15.1,8702002 8.15.2,8702003 +8.15.3,8702003 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 971940041f9b1..e3681cc975988 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -130,3 +130,4 @@ 8.15.0,8512000 8.15.1,8512000 8.15.2,8512000 +8.15.3,8512000 diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index 3079b6d4b0371..c6ca97fd5694a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -16,9 +16,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import 
org.elasticsearch.index.query.TermQueryBuilder; @@ -102,6 +105,23 @@ public void testSerialization() throws Exception { assertNotSame(deserializedRequest, searchRequest); } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed when the affected transport version constants are collapsed + public void testSerializationConstants() throws Exception { + SearchRequest searchRequest = createSearchRequest(); + + // something serialized with previous version to remove, should read correctly with the reversion + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(TransportVersionUtils.getPreviousVersion(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE)); + searchRequest.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + in.setTransportVersion(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE); + SearchRequest copiedRequest = new SearchRequest(in); + assertEquals(copiedRequest, searchRequest); + assertEquals(copiedRequest.hashCode(), searchRequest.hashCode()); + } + } + } + public void testSerializationMultiKNN() throws Exception { SearchRequest searchRequest = createSearchRequest(); if (searchRequest.source() == null) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 358ad07664870..399740e6200e6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, true).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 658176b1d31dd..fdc18264e2299 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, true).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git 
a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java index 6b42dbbb39c9f..5371893993318 100644 --- a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java +++ b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -173,8 +174,8 @@ public void testGeoTileFormat() { assertEquals("29/536869420/0", DocValueFormat.GEOTILE.format(longEncode(179.999, 89.999, 29))); assertEquals("29/1491/536870911", DocValueFormat.GEOTILE.format(longEncode(-179.999, -89.999, 29))); assertEquals("2/2/1", DocValueFormat.GEOTILE.format(longEncode(1, 1, 2))); - assertEquals("1/1/0", DocValueFormat.GEOTILE.format(longEncode(13, 95, 1))); - assertEquals("1/1/1", DocValueFormat.GEOTILE.format(longEncode(13, -95, 1))); + assertEquals("1/1/0", DocValueFormat.GEOTILE.format(longEncode(13, GeoUtils.normalizeLat(95), 1))); + assertEquals("1/1/1", DocValueFormat.GEOTILE.format(longEncode(13, GeoUtils.normalizeLat(-95), 1))); } public void testRawParse() { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java index b056cc1fcc988..975c1af3dc3d9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java @@ -16,6 +16,8 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import static org.elasticsearch.common.geo.GeoUtils.normalizeLat; +import static org.elasticsearch.common.geo.GeoUtils.normalizeLon; import static org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils.MAX_ZOOM; import static org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils.checkPrecisionRange; import static org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils.hashToGeoPoint; @@ -53,20 +55,20 @@ public void testLongEncode() { assertEquals(0x77FFFF4580000000L, longEncode(179.999, 89.999, 29)); assertEquals(0x740000BA7FFFFFFFL, longEncode(-179.999, -89.999, 29)); assertEquals(0x0800000040000001L, longEncode(1, 1, 2)); - assertEquals(0x0C00000060000000L, longEncode(-20, 100, 3)); + assertEquals(0x0C00000060000000L, longEncode(-20, normalizeLat(100), 3)); assertEquals(0x71127D27C8ACA67AL, longEncode(13, -15, 28)); assertEquals(0x4C0077776003A9ACL, longEncode(-12, 15, 19)); - assertEquals(0x140000024000000EL, longEncode(-328.231870, 16.064082, 5)); - assertEquals(0x6436F96B60000000L, longEncode(-590.769588, 89.549167, 25)); - assertEquals(0x6411BD6BA0A98359L, longEncode(999.787079, 51.830093, 25)); - assertEquals(0x751BD6BBCA983596L, longEncode(999.787079, 51.830093, 29)); - assertEquals(0x77CF880A20000000L, longEncode(-557.039740, -632.103969, 29)); + assertEquals(0x140000024000000EL, longEncode(normalizeLon(-328.231870), 16.064082, 5)); + assertEquals(0x6436F96B60000000L, longEncode(normalizeLon(-590.769588), 89.549167, 25)); + assertEquals(0x6411BD6BA0A98359L, longEncode(normalizeLon(999.787079), 51.830093, 25)); + 
assertEquals(0x751BD6BBCA983596L, longEncode(normalizeLon(999.787079), 51.830093, 29)); + assertEquals(0x77CF880A20000000L, longEncode(normalizeLon(-557.039740), normalizeLat(-632.103969), 29)); assertEquals(0x7624FA4FA0000000L, longEncode(13, 88, 29)); assertEquals(0x7624FA4FBFFFFFFFL, longEncode(13, -88, 29)); assertEquals(0x0400000020000000L, longEncode(13, 89, 1)); assertEquals(0x0400000020000001L, longEncode(13, -89, 1)); - assertEquals(0x0400000020000000L, longEncode(13, 95, 1)); - assertEquals(0x0400000020000001L, longEncode(13, -95, 1)); + assertEquals(0x0400000020000000L, longEncode(13, normalizeLat(95), 1)); + assertEquals(0x0400000020000001L, longEncode(13, normalizeLat(-95), 1)); expectThrows(IllegalArgumentException.class, () -> longEncode(0, 0, -1)); expectThrows(IllegalArgumentException.class, () -> longEncode(-1, 0, MAX_ZOOM + 1)); @@ -78,20 +80,20 @@ public void testLongEncodeFromString() { assertEquals(0x77FFFF4580000000L, longEncode(stringEncode(longEncode(179.999, 89.999, 29)))); assertEquals(0x740000BA7FFFFFFFL, longEncode(stringEncode(longEncode(-179.999, -89.999, 29)))); assertEquals(0x0800000040000001L, longEncode(stringEncode(longEncode(1, 1, 2)))); - assertEquals(0x0C00000060000000L, longEncode(stringEncode(longEncode(-20, 100, 3)))); + assertEquals(0x0C00000060000000L, longEncode(stringEncode(longEncode(-20, normalizeLat(100), 3)))); assertEquals(0x71127D27C8ACA67AL, longEncode(stringEncode(longEncode(13, -15, 28)))); assertEquals(0x4C0077776003A9ACL, longEncode(stringEncode(longEncode(-12, 15, 19)))); - assertEquals(0x140000024000000EL, longEncode(stringEncode(longEncode(-328.231870, 16.064082, 5)))); - assertEquals(0x6436F96B60000000L, longEncode(stringEncode(longEncode(-590.769588, 89.549167, 25)))); - assertEquals(0x6411BD6BA0A98359L, longEncode(stringEncode(longEncode(999.787079, 51.830093, 25)))); - assertEquals(0x751BD6BBCA983596L, longEncode(stringEncode(longEncode(999.787079, 51.830093, 29)))); - assertEquals(0x77CF880A20000000L, longEncode(stringEncode(longEncode(-557.039740, -632.103969, 29)))); + assertEquals(0x140000024000000EL, longEncode(stringEncode(longEncode(normalizeLon(-328.231870), 16.064082, 5)))); + assertEquals(0x6436F96B60000000L, longEncode(stringEncode(longEncode(normalizeLon(-590.769588), 89.549167, 25)))); + assertEquals(0x6411BD6BA0A98359L, longEncode(stringEncode(longEncode(normalizeLon(999.787079), 51.830093, 25)))); + assertEquals(0x751BD6BBCA983596L, longEncode(stringEncode(longEncode(normalizeLon(999.787079), 51.830093, 29)))); + assertEquals(0x77CF880A20000000L, longEncode(stringEncode(longEncode(normalizeLon(-557.039740), normalizeLat(-632.103969), 29)))); assertEquals(0x7624FA4FA0000000L, longEncode(stringEncode(longEncode(13, 88, 29)))); assertEquals(0x7624FA4FBFFFFFFFL, longEncode(stringEncode(longEncode(13, -88, 29)))); assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, 89, 1)))); assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, -89, 1)))); - assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, 95, 1)))); - assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, -95, 1)))); + assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, normalizeLat(95), 1)))); + assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, normalizeLat(-95), 1)))); expectThrows(IllegalArgumentException.class, () -> longEncode("12/asdf/1")); expectThrows(IllegalArgumentException.class, () -> longEncode("foo")); diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index d17016f850300..e5b23158d4fd4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1121,6 +1121,8 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE if (preserveSecurityIndices) { indexPatterns.add("-.security-*"); } + // always preserve inference index + indexPatterns.add("-.inference"); final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns)); deleteRequest.addParameter("expand_wildcards", "open,closed,hidden"); final Response response = adminClient().performRequest(deleteRequest); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index 34ebdcb7f9f9f..ca789fee7b744 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -623,6 +623,9 @@ public String getDeploymentId() { * @return the estimated memory (in bytes) required for the model deployment to run */ public long estimateMemoryUsageBytes() { + if (numberOfAllocations == 0) { + return 0; + } // We already take into account 2x the model bytes. If the cache size is larger than the model bytes, then // we need to take it into account when returning the estimate. if (cacheSize != null && cacheSize.getBytes() > modelBytes) { @@ -796,6 +799,9 @@ public static long estimateMemoryUsageBytes( long perAllocationMemoryBytes, int numberOfAllocations ) { + if (numberOfAllocations == 0) { + return 0; + } // While loading the model in the process we need twice the model size. // 1. 
If ELSER v1 or v2 then 2004MB diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java index e33f9b1c20527..1f7d03ba9d905 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -42,11 +43,11 @@ public Alias(Source source, String name, Expression child) { this(source, name, child, null); } - public Alias(Source source, String name, Expression child, NameId id) { + public Alias(Source source, String name, Expression child, @Nullable NameId id) { this(source, name, child, id, false); } - public Alias(Source source, String name, Expression child, NameId id, boolean synthetic) { + public Alias(Source source, String name, Expression child, @Nullable NameId id, boolean synthetic) { super(source, name, singletonList(child), id, synthetic); this.child = child; } @@ -55,7 +56,7 @@ public Alias(Source source, String name, Expression child, NameId id, boolean sy /** * Old constructor from when this had a qualifier string. Still needed to not break serialization. */ - private Alias(Source source, String name, String qualifier, Expression child, NameId id, boolean synthetic) { + private Alias(Source source, String name, String qualifier, Expression child, @Nullable NameId id, boolean synthetic) { this(source, name, child, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 05c414298fd33..45f42a754910d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -41,15 +42,15 @@ public static List getNamedWriteables() { // can the attr be null - typically used in JOINs private final Nullability nullability; - public Attribute(Source source, String name, NameId id) { + public Attribute(Source source, String name, @Nullable NameId id) { this(source, name, Nullability.TRUE, id); } - public Attribute(Source source, String name, Nullability nullability, NameId id) { + public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id) { this(source, name, nullability, id, false); } - public Attribute(Source source, String name, Nullability nullability, NameId id, boolean synthetic) { + public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id, boolean synthetic) { super(source, name, emptyList(), id, synthetic); this.nullability = nullability; } diff 
--git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java index 767d2f45f90e4..4076acdb7e7b8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java @@ -6,21 +6,25 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; -import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Attribute for an ES field. * To differentiate between the different type of fields this class offers: @@ -37,32 +41,31 @@ public class FieldAttribute extends TypedAttribute { FieldAttribute::readFrom ); - private final FieldAttribute parent; - private final String path; + private final String parentName; private final EsField field; public FieldAttribute(Source source, String name, EsField field) { this(source, null, name, field); } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field) { - this(source, parent, name, field, Nullability.TRUE, null, false); + public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field) { + this(source, parentName, name, field, Nullability.TRUE, null, false); } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, boolean synthetic) { - this(source, parent, name, field, Nullability.TRUE, null, synthetic); + public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field, boolean synthetic) { + this(source, parentName, name, field, Nullability.TRUE, null, synthetic); } public FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, EsField field, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { - this(source, parent, name, field.getDataType(), field, nullability, id, synthetic); + this(source, parentName, name, field.getDataType(), field, nullability, id, synthetic); } /** @@ -71,17 +74,16 @@ public FieldAttribute( */ FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, DataType type, EsField field, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { super(source, name, type, nullability, id, synthetic); - this.path = parent != null ? 
parent.name() : StringUtils.EMPTY; - this.parent = parent; + this.parentName = parentName; this.field = field; } @@ -91,16 +93,16 @@ public FieldAttribute( */ private FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, DataType type, EsField field, - String qualifier, + @Nullable String qualifier, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { - this(source, parent, name, type, field, nullability, id, synthetic); + this(source, parentName, name, type, field, nullability, id, synthetic); } private FieldAttribute(StreamInput in) throws IOException { @@ -114,8 +116,8 @@ private FieldAttribute(StreamInput in) throws IOException { */ this( Source.readFrom((StreamInput & PlanStreamInput) in), - in.readOptionalWriteable(FieldAttribute::readFrom), - ((PlanStreamInput) in).readCachedString(), + readParentName(in), + readCachedStringWithVersionCheck(in), DataType.readFrom(in), EsField.readFrom(in), in.readOptionalString(), @@ -129,8 +131,8 @@ private FieldAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - out.writeOptionalWriteable(parent); - ((PlanStreamOutput) out).writeCachedString(name()); + writeParentName(out); + writeCachedStringWithVersionCheck(out, name()); dataType().writeTo(out); field.writeTo(out); // We used to write the qualifier here. We can still do if needed in the future. @@ -145,6 +147,26 @@ public static FieldAttribute readFrom(StreamInput in) throws IOException { return ((PlanStreamInput) in).readAttributeWithCache(FieldAttribute::new); } + private void writeParentName(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + ((PlanStreamOutput) out).writeOptionalCachedString(parentName); + } else { + // Previous versions only used the parent field attribute to retrieve the parent's name, so we can use just any + // fake FieldAttribute here as long as the name is correct. + FieldAttribute fakeParent = parentName() == null ? null : new FieldAttribute(Source.EMPTY, parentName(), field()); + out.writeOptionalWriteable(fakeParent); + } + } + + private static String readParentName(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + return ((PlanStreamInput) in).readOptionalCachedString(); + } + + FieldAttribute parent = in.readOptionalWriteable(FieldAttribute::readFrom); + return parent == null ? null : parent.name(); + } + @Override public String getWriteableName() { return ENTRY.name; @@ -152,15 +174,22 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, FieldAttribute::new, parent, name(), dataType(), field, (String) null, nullable(), id(), synthetic()); - } - - public FieldAttribute parent() { - return parent; + return NodeInfo.create( + this, + FieldAttribute::new, + parentName, + name(), + dataType(), + field, + (String) null, + nullable(), + id(), + synthetic() + ); } - public String path() { - return path; + public String parentName() { + return parentName; } /** @@ -174,7 +203,7 @@ public String fieldName() { if ((synthetic() || name().startsWith(SYNTHETIC_ATTRIBUTE_NAME_PREFIX)) == false) { return name(); } - return Strings.hasText(path) ? path + "." 
+ field.getName() : field.getName(); + return Strings.hasText(parentName) ? parentName + "." + field.getName() : field.getName(); } public EsField.Exact getExactInfo() { @@ -190,13 +219,13 @@ public FieldAttribute exactAttribute() { } private FieldAttribute innerField(EsField type) { - return new FieldAttribute(source(), this, name() + "." + type.getName(), type, nullable(), id(), synthetic()); + return new FieldAttribute(source(), name(), name() + "." + type.getName(), type, nullable(), id(), synthetic()); } @Override protected Attribute clone(Source source, String name, DataType type, Nullability nullability, NameId id, boolean synthetic) { // Ignore `type`, this must be the same as the field's type. - return new FieldAttribute(source, parent, name, field, nullability, id, synthetic); + return new FieldAttribute(source, parentName, name, field, nullability, id, synthetic); } @Override @@ -206,13 +235,13 @@ public Attribute withDataType(DataType type) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), path, field); + return Objects.hash(super.hashCode(), parentName, field); } @Override public boolean equals(Object obj) { return super.equals(obj) - && Objects.equals(path, ((FieldAttribute) obj).path) + && Objects.equals(parentName, ((FieldAttribute) obj).parentName) && Objects.equals(field, ((FieldAttribute) obj).field); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 539c55ba341cf..3641812cd6cad 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; @@ -59,7 +60,7 @@ public MetadataAttribute( String name, DataType dataType, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic, boolean searchable ) { @@ -79,9 +80,9 @@ private MetadataAttribute( Source source, String name, DataType dataType, - String qualifier, + @Nullable String qualifier, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic, boolean searchable ) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java index ba467910bed0d..3b018f09e5ebd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.ArrayList; @@ -32,11 +33,11 @@ public static List getNamedWriteables() { private final NameId id; private final boolean synthetic; - public 
NamedExpression(Source source, String name, List children, NameId id) { + public NamedExpression(Source source, String name, List children, @Nullable NameId id) { this(source, name, children, id, false); } - public NamedExpression(Source source, String name, List children, NameId id, boolean synthetic) { + public NamedExpression(Source source, String name, List children, @Nullable NameId id, boolean synthetic) { super(source, children); this.name = name; this.id = id == null ? new NameId() : id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java index 504e1eae8d880..3626c5d26f235 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -31,7 +32,14 @@ public ReferenceAttribute(Source source, String name, DataType dataType) { this(source, name, dataType, Nullability.FALSE, null, false); } - public ReferenceAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { + public ReferenceAttribute( + Source source, + String name, + DataType dataType, + Nullability nullability, + @Nullable NameId id, + boolean synthetic + ) { super(source, name, dataType, nullability, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java index 0350abef99992..f8a041110798c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -15,7 +16,14 @@ public abstract class TypedAttribute extends Attribute { private final DataType dataType; - protected TypedAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { + protected TypedAttribute( + Source source, + String name, + DataType dataType, + Nullability nullability, + @Nullable NameId id, + boolean synthetic + ) { super(source, name, nullability, id, synthetic); this.dataType = dataType; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java index d8a35adcbffde..a971a15a23c86 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java +++ 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -33,7 +34,7 @@ public UnresolvedAttribute(Source source, String name, String unresolvedMessage) } @SuppressWarnings("this-escape") - public UnresolvedAttribute(Source source, String name, NameId id, String unresolvedMessage, Object resolutionMetadata) { + public UnresolvedAttribute(Source source, String name, @Nullable NameId id, String unresolvedMessage, Object resolutionMetadata) { super(source, name, id); this.customMessage = unresolvedMessage != null; this.unresolvedMsg = unresolvedMessage == null ? errorMessage(name(), null) : unresolvedMessage; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index cb1a7b2eb6fe0..12699ca3ee720 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -14,8 +14,6 @@ import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.math.BigInteger; @@ -32,6 +30,8 @@ import static java.util.stream.Collectors.toMap; import static java.util.stream.Collectors.toUnmodifiableMap; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; public enum DataType { /** @@ -535,12 +535,12 @@ public DataType counter() { } public void writeTo(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(typeName); + writeCachedStringWithVersionCheck(out, typeName); } public static DataType readFrom(StreamInput in) throws IOException { // TODO: Use our normal enum serialization pattern - return readFrom(((PlanStreamInput) in).readCachedString()); + return readFrom(readCachedStringWithVersionCheck(in)); } /** diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java index 7c4b98c5af84e..3a81ec2a6f17d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java @@ -8,12 +8,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import 
static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index with the {@code date} type */ @@ -28,12 +29,12 @@ private DateEsField(String name, DataType dataType, Map propert } protected DateEsField(StreamInput in) throws IOException { - this(((PlanStreamInput) in).readCachedString(), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); + this(readCachedStringWithVersionCheck(in), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java index 6235176d82de6..47dadcbb11de2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java @@ -18,6 +18,9 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index. */ @@ -60,7 +63,7 @@ public EsField(String name, DataType esDataType, Map properties } public EsField(StreamInput in) throws IOException { - this.name = ((PlanStreamInput) in).readCachedString(); + this.name = readCachedStringWithVersionCheck(in); this.esDataType = readDataType(in); this.properties = in.readImmutableMap(EsField::readFrom); this.aggregatable = in.readBoolean(); @@ -68,7 +71,7 @@ public EsField(StreamInput in) throws IOException { } private DataType readDataType(StreamInput in) throws IOException { - String name = ((PlanStreamInput) in).readCachedString(); + String name = readCachedStringWithVersionCheck(in); if (in.getTransportVersion().before(TransportVersions.ESQL_NESTED_UNSUPPORTED) && name.equalsIgnoreCase("NESTED")) { /* * The "nested" data type existed in older versions of ESQL but was @@ -98,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { * This needs to be overridden by subclasses for specific serialization */ public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(name); + writeCachedStringWithVersionCheck(out, name); esDataType.writeTo(out); out.writeMap(properties, (o, x) -> x.writeTo(out)); out.writeBoolean(aggregatable); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java index 40825af56ccfe..f83e4652ebebd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; @@ -20,6 +18,9 @@ import java.util.TreeMap; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Representation of field mapped differently across indices. * Used during mapping discovery only. @@ -54,7 +55,7 @@ private InvalidMappedField(String name, String errorMessage, Map types() { @@ -63,7 +64,7 @@ public Set types() { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeString(errorMessage); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java index 48995bafec451..8b88884a0ce17 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java @@ -8,8 +8,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Collections; @@ -17,6 +15,8 @@ import java.util.Objects; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an ES index with the {@code keyword} type. 
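Editor's note (illustrative, not part of the patch): the hunks in this file and in the surrounding EsField subclasses all apply the same mechanical change, so a condensed sketch may help while reviewing. Direct calls to ((PlanStreamInput) in).readCachedString() and ((PlanStreamOutput) out).writeCachedString(...) are swapped for the new static helpers, which fall back to plain string serialization when the transport version is older than ESQL_CACHED_STRING_SERIALIZATION. The class below is hypothetical and will not compile outside the Elasticsearch codebase; the helper names and stream calls are taken from the patch.

import java.io.IOException;
import java.util.Map;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.core.type.EsField;

import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck;
import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck;

// Hypothetical EsField subclass, shown only to illustrate the before/after shape of the change.
public class ExampleEsField extends EsField {

    public ExampleEsField(String name, Map<String, EsField> properties, boolean aggregatable) {
        super(name, DataType.KEYWORD, properties, aggregatable);
    }

    protected ExampleEsField(StreamInput in) throws IOException {
        // before: ((PlanStreamInput) in).readCachedString()
        this(readCachedStringWithVersionCheck(in), in.readImmutableMap(EsField::readFrom), in.readBoolean());
    }

    @Override
    public void writeContent(StreamOutput out) throws IOException {
        // before: ((PlanStreamOutput) out).writeCachedString(getName());
        writeCachedStringWithVersionCheck(out, getName());
        out.writeMap(getProperties(), (o, x) -> x.writeTo(out));
        out.writeBoolean(isAggregatable());
    }
}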
@@ -61,7 +61,7 @@ protected KeywordEsField( public KeywordEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), KEYWORD, in.readImmutableMap(EsField::readFrom), in.readBoolean(), @@ -73,7 +73,7 @@ public KeywordEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeInt(precision); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java index 522cb682c0943..0d7f9ee425d6a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.HashMap; @@ -19,6 +17,9 @@ import java.util.Objects; import java.util.Set; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * During IndexResolution it could occur that the same field is mapped to different types in different indices. 
* The class MultiTypeEfField.UnresolvedField holds that information and allows for later resolution of the field @@ -39,7 +40,7 @@ public MultiTypeEsField(String name, DataType dataType, boolean aggregatable, Ma protected MultiTypeEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), DataType.readFrom(in), in.readBoolean(), in.readImmutableMap(i -> i.readNamedWriteable(Expression.class)) @@ -48,7 +49,7 @@ protected MultiTypeEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); getDataType().writeTo(out); out.writeBoolean(isAggregatable()); out.writeMap(getIndexToConversionExpressions(), (o, v) -> out.writeNamedWriteable(v)); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java index c6c494ef289bb..ed0d32a7696eb 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; @@ -19,6 +17,8 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an es index with the {@code text} type. 
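Editor's note (illustrative): the byte-for-byte size assertions in the serialization tests later in this patch are easier to follow with the point of writeCachedString in mind. Stripped of all Elasticsearch types, it is a write-side string table: the first occurrence of a string goes over the wire in full together with a fresh id, and every repeat is written as the id alone, which is what makes large mappings with heavily repeated field names cheap to serialize. A minimal, runnable sketch of that idea follows; it is not the real PlanStreamOutput encoding, which uses ZLong markers, attribute and EsField caches, and a cache size limit.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Minimal sketch of write-side string caching: repeated strings cost one small id
// instead of the full string. Not the real PlanStreamOutput wire format.
final class StringCacheSketch {
    private final Map<String, Integer> cache = new HashMap<>();
    private final List<Object> wire = new ArrayList<>(); // stand-in for the byte stream

    void writeCachedString(String s) {
        Integer id = cache.get(s);
        if (id == null) {
            id = cache.size();
            cache.put(s, id);
            wire.add("new:" + id); // marker: the next element is the full string
            wire.add(s);
        } else {
            wire.add(id); // cache hit: id only
        }
    }

    public static void main(String[] args) {
        StringCacheSketch out = new StringCacheSketch();
        out.writeCachedString("message.keyword");
        out.writeCachedString("message.keyword"); // second write: just the id
        out.writeCachedString("host.name");
        System.out.println(out.wire); // [new:0, message.keyword, 0, new:1, host.name]
    }
}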
@@ -34,12 +34,12 @@ public TextEsField(String name, Map properties, boolean hasDocV } protected TextEsField(StreamInput in) throws IOException { - this(((PlanStreamInput) in).readCachedString(), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); + this(readCachedStringWithVersionCheck(in), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeBoolean(isAlias()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java index 980620cb98847..02ce741243c20 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java @@ -8,14 +8,15 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; import java.util.Objects; import java.util.TreeMap; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index that cannot be supported by ESQL. * All the subfields (properties) of an unsupported type are also be unsupported. 
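Editor's note: the PlanStreamInput/PlanStreamOutput additions just below (the version-checked static helpers plus readOptionalCachedString/writeOptionalCachedString) exist mainly so that FieldAttribute, earlier in this patch, can carry a plain parentName string instead of a full parent attribute without breaking older nodes. Condensed from the FieldAttribute hunks above (nothing new here, just a recap of the compatibility shim):

private void writeParentName(StreamOutput out) throws IOException {
    if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) {
        ((PlanStreamOutput) out).writeOptionalCachedString(parentName);
    } else {
        // Older versions only ever read the parent's name, so any attribute with the right name will do.
        FieldAttribute fakeParent = parentName() == null ? null : new FieldAttribute(Source.EMPTY, parentName(), field());
        out.writeOptionalWriteable(fakeParent);
    }
}

private static String readParentName(StreamInput in) throws IOException {
    if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) {
        return ((PlanStreamInput) in).readOptionalCachedString();
    }
    FieldAttribute parent = in.readOptionalWriteable(FieldAttribute::readFrom);
    return parent == null ? null : parent.name();
}

On the read side the helper either consumes the new optional cached string or reconstructs the name from the old optional FieldAttribute, so mixed-version clusters keep working during an upgrade.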
@@ -37,8 +38,8 @@ public UnsupportedEsField(String name, String originalType, String inherited, Ma public UnsupportedEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), + readCachedStringWithVersionCheck(in), in.readOptionalString(), in.readImmutableMap(EsField::readFrom) ); @@ -46,8 +47,8 @@ public UnsupportedEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); - ((PlanStreamOutput) out).writeCachedString(getOriginalType()); + writeCachedStringWithVersionCheck(out, getName()); + writeCachedStringWithVersionCheck(out, getOriginalType()); out.writeOptionalString(getInherited()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java index 826b0cbfa3498..e8ccae3429001 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.util; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -49,4 +50,13 @@ public interface PlanStreamInput { A readEsFieldWithCache() throws IOException; String readCachedString() throws IOException; + + static String readCachedStringWithVersionCheck(StreamInput planStreamInput) throws IOException { + if (planStreamInput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + return planStreamInput.readString(); + } + return ((PlanStreamInput) planStreamInput).readCachedString(); + } + + String readOptionalCachedString() throws IOException; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java index e4797411c3796..fb4af33d2fd60 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.core.util; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -33,4 +35,14 @@ public interface PlanStreamOutput { boolean writeEsFieldCacheHeader(EsField field) throws IOException; void writeCachedString(String field) throws IOException; + + static void writeCachedStringWithVersionCheck(StreamOutput planStreamOutput, String string) throws IOException { + if (planStreamOutput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + planStreamOutput.writeString(string); + } else { + ((PlanStreamOutput) planStreamOutput).writeCachedString(string); + } + } + + void writeOptionalCachedString(String str) throws IOException; } diff --git 
a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java index 1662b7f973c9d..c7e5056ed0267 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java @@ -12,9 +12,9 @@ import org.elasticsearch.xpack.esql.core.type.EsField; public class FieldAttributeTestUtils { - public static final FieldAttribute newFieldAttributeWithType( + public static FieldAttribute newFieldAttributeWithType( Source source, - FieldAttribute parent, + String parentName, String name, DataType type, EsField field, @@ -22,6 +22,6 @@ public static final FieldAttribute newFieldAttributeWithType( NameId id, boolean synthetic ) { - return new FieldAttribute(source, parent, name, type, field, nullability, id, synthetic); + return new FieldAttribute(source, parentName, name, type, field, nullability, id, synthetic); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index fe7b945a9b3c1..b18f58b0a43cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -228,13 +228,13 @@ public static List mappingAsAttributes(Source source, Map list, Source source, FieldAttribute parent, Map mapping) { + private static void mappingAsAttributes(List list, Source source, String parentName, Map mapping) { for (Map.Entry entry : mapping.entrySet()) { String name = entry.getKey(); EsField t = entry.getValue(); if (t != null) { - name = parent == null ? name : parent.fieldName() + "." + name; + name = parentName == null ? name : parentName + "." + name; var fieldProperties = t.getProperties(); var type = t.getDataType().widenSmallNumeric(); // due to a bug also copy the field since the Attribute hierarchy extracts the data type @@ -245,14 +245,14 @@ private static void mappingAsAttributes(List list, Source source, Fie FieldAttribute attribute = t instanceof UnsupportedEsField uef ? new UnsupportedAttribute(source, name, uef) - : new FieldAttribute(source, parent, name, t); + : new FieldAttribute(source, parentName, name, t); // primitive branch if (DataType.isPrimitive(type)) { list.add(attribute); } // allow compound object even if they are unknown if (fieldProperties.isEmpty() == false) { - mappingAsAttributes(list, source, attribute, fieldProperties); + mappingAsAttributes(list, source, attribute.name(), fieldProperties); } } } @@ -1252,7 +1252,7 @@ private Expression createIfDoesNotAlreadyExist( // NOTE: The name has to start with $$ to not break bwc with 8.15 - in that version, this is how we had to mark this as // synthetic to work around a bug. 
String unionTypedFieldName = Attribute.rawTemporaryName(fa.name(), "converted_to", resolvedField.getDataType().typeName()); - FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parent(), unionTypedFieldName, resolvedField, true); + FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parentName(), unionTypedFieldName, resolvedField, true); int existingIndex = unionFieldAttributes.indexOf(unionFieldAttribute); if (existingIndex >= 0) { // Do not generate multiple name/type combinations with different IDs @@ -1281,7 +1281,7 @@ private Expression typeSpecificConvert(AbstractConvertFunction convert, Source s FieldAttribute originalFieldAttr = (FieldAttribute) convert.field(); FieldAttribute resolvedAttr = new FieldAttribute( source, - originalFieldAttr.parent(), + originalFieldAttr.parentName(), originalFieldAttr.name(), field, originalFieldAttr.nullable(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index 2c709de7717ce..d372eddb961ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -29,6 +30,9 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Unsupported attribute meaning an attribute that has been found yet cannot be used (hence why UnresolvedAttribute * cannot be used) expect in special conditions (currently only in projections to allow it to flow through @@ -63,11 +67,11 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field this(source, name, field, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage) { this(source, name, field, customMessage, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage, NameId id) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage, @Nullable NameId id) { super(source, null, name, field, Nullability.TRUE, id, false); this.hasCustomMessage = customMessage != null; this.message = customMessage == null ? 
errorMessage(name(), field) : customMessage; @@ -76,7 +80,7 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field private UnsupportedAttribute(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2) ? EsField.readFrom(in) : new UnsupportedEsField(in), in.readOptionalString(), @@ -88,7 +92,7 @@ private UnsupportedAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeCachedString(name()); + writeCachedStringWithVersionCheck(out, name()); if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { field().writeTo(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 9003cbec12d1e..1e1cc3b86a9d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -37,6 +37,8 @@ import java.util.Map; import java.util.function.LongFunction; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; + /** * A customized stream input used to deserialize ESQL physical plan fragments. Complements stream * input with methods that read plan nodes, Attributes, Expressions, etc. @@ -224,7 +226,7 @@ public A readEsFieldWithCache() throws IOException { // it's safe to cast to int, since the max value for this is {@link PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { - String className = readCachedString(); + String className = readCachedStringWithVersionCheck(this); Writeable.Reader reader = EsField.getReader(className); cacheId = -1 - cacheId; EsField result = reader.read(this); @@ -234,7 +236,7 @@ public A readEsFieldWithCache() throws IOException { return (A) esFieldFromCache(cacheId); } } else { - String className = readCachedString(); + String className = readCachedStringWithVersionCheck(this); Writeable.Reader reader = EsField.getReader(className); return (A) reader.read(this); } @@ -245,9 +247,6 @@ public A readEsFieldWithCache() throws IOException { */ @Override public String readCachedString() throws IOException { - if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - return readString(); - } int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { String string = readString(); @@ -259,6 +258,11 @@ public String readCachedString() throws IOException { } } + @Override + public String readOptionalCachedString() throws IOException { + return readBoolean() ? 
readCachedString() : null; + } + private EsField esFieldFromCache(int id) throws IOException { EsField field = esFieldsCache[id]; if (field == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index b633b10122eb3..615c4266620c7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -30,6 +30,8 @@ import java.util.IdentityHashMap; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. @@ -195,7 +197,7 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { cacheId = cacheEsField(field); writeZLong(-1 - cacheId); } - writeCachedString(field.getWriteableName()); + writeCachedStringWithVersionCheck(this, field.getWriteableName()); return true; } @@ -207,10 +209,6 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { */ @Override public void writeCachedString(String string) throws IOException { - if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - writeString(string); - return; - } Integer cacheId = stringCache.get(string); if (cacheId != null) { writeZLong(cacheId); @@ -226,6 +224,16 @@ public void writeCachedString(String string) throws IOException { writeString(string); } + @Override + public void writeOptionalCachedString(String str) throws IOException { + if (str == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writeCachedString(str); + } + } + private Integer esFieldIdFromCache(EsField field) { return cachedEsFields.get(field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 951fc7ad1cf29..eb72009638396 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -112,7 +112,12 @@ private static List flatten(Source source, Map mappi EsField t = entry.getValue(); if (t != null) { - FieldAttribute f = new FieldAttribute(source, parent, parent != null ? parent.name() + "." + name : name, t); + FieldAttribute f = new FieldAttribute( + source, + parent != null ? parent.name() : null, + parent != null ? parent.name() + "." 
+ name : name, + t + ); list.add(f); // object or nested if (t.getProperties().isEmpty() == false) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java index e8f0333791844..6b2040f58f84c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java @@ -20,7 +20,7 @@ public class FieldAttributeTests extends AbstractAttributeTestCase { public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepresentable) { Source source = Source.EMPTY; - FieldAttribute parent = maxDepth == 0 || randomBoolean() ? null : createFieldAttribute(maxDepth - 1, onlyRepresentable); + String parentName = maxDepth == 0 || randomBoolean() ? null : randomAlphaOfLength(3); String name = randomAlphaOfLength(5); DataType type = onlyRepresentable ? randomValueOtherThanMany(t -> false == DataType.isRepresentable(t), () -> randomFrom(DataType.types())) @@ -28,7 +28,7 @@ public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepr EsField field = AbstractEsFieldTypeTests.randomAnyEsField(maxDepth); Nullability nullability = randomFrom(Nullability.values()); boolean synthetic = randomBoolean(); - return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); } @Override @@ -39,20 +39,20 @@ protected FieldAttribute create() { @Override protected FieldAttribute mutate(FieldAttribute instance) { Source source = instance.source(); - FieldAttribute parent = instance.parent(); + String parentName = instance.parentName(); String name = instance.name(); DataType type = instance.dataType(); EsField field = instance.field(); Nullability nullability = instance.nullable(); boolean synthetic = instance.synthetic(); switch (between(0, 5)) { - case 0 -> parent = randomValueOtherThan(parent, () -> randomBoolean() ? null : createFieldAttribute(2, false)); + case 0 -> parentName = randomValueOtherThan(parentName, () -> randomBoolean() ? 
null : randomAlphaOfLength(2)); case 1 -> name = randomAlphaOfLength(name.length() + 1); case 2 -> type = randomValueOtherThan(type, () -> randomFrom(DataType.types())); case 3 -> field = randomValueOtherThan(field, () -> AbstractEsFieldTypeTests.randomAnyEsField(3)); case 4 -> nullability = randomValueOtherThan(nullability, () -> randomFrom(Nullability.values())); case 5 -> synthetic = false == synthetic; } - return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java index 687b83370f571..82dd5a88ffaf1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java @@ -182,4 +182,51 @@ private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) t assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expected)); } } + + public static EsIndex deeplyNestedIndex(int depth, int childrenPerLevel) { + String rootFieldName = "root"; + Map fields = Map.of(rootFieldName, fieldWithRecursiveChildren(depth, childrenPerLevel, rootFieldName)); + + return new EsIndex("deeply-nested", fields); + } + + private static EsField fieldWithRecursiveChildren(int depth, int childrenPerLevel, String name) { + assert depth >= 1; + + Map children = new TreeMap<>(); + String childName; + if (depth == 1) { + for (int i = 0; i < childrenPerLevel; i++) { + childName = "leaf" + i; + children.put(childName, new EsField(childName, DataType.KEYWORD, Map.of(), true)); + } + } else { + for (int i = 0; i < childrenPerLevel; i++) { + childName = "level" + depth + "child" + i; + children.put(childName, fieldWithRecursiveChildren(depth - 1, childrenPerLevel, childName)); + } + } + + return new EsField(name, DataType.OBJECT, children, false); + } + + /** + * Test de-/serialization and size on the wire for an index that has multiple levels of children: + * A single root with 9 children, each of which has 9 children etc. 6 levels deep. 
+ */ + public void testDeeplyNestedFields() throws IOException { + ByteSizeValue expectedSize = ByteSizeValue.ofBytes(9425494); + /* + * History: + * 9425494b - string serialization #112929 + */ + + int depth = 6; + int childrenPerLevel = 9; + + try (BytesStreamOutput out = new BytesStreamOutput(); var pso = new PlanStreamOutput(out, null)) { + deeplyNestedIndex(depth, childrenPerLevel).writeTo(pso); + assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expectedSize)); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index 33252b9dbaaa3..d3e1710a715af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -44,7 +43,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; public class PlanStreamOutputTests extends ESTestCase { @@ -118,26 +116,13 @@ public void testWriteAttributeMultipleTimes() throws IOException { for (int i = 0; i < occurrences; i++) { planStream.writeNamedWriteable(attribute); } - int depth = 0; - Attribute parent = attribute; - while (parent != null) { - depth++; - parent = parent instanceof FieldAttribute f ? f.parent() : null; - } - assertThat(planStream.cachedAttributes.size(), is(depth)); + assertThat(planStream.cachedAttributes.size(), is(1)); try (PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), REGISTRY, configuration)) { Attribute first = in.readNamedWriteable(Attribute.class); for (int i = 1; i < occurrences; i++) { Attribute next = in.readNamedWriteable(Attribute.class); assertThat(first, sameInstance(next)); } - for (int i = 0; i < depth; i++) { - assertThat(first, equalTo(attribute)); - first = first instanceof FieldAttribute f ? f.parent() : null; - attribute = attribute instanceof FieldAttribute f ? 
f.parent() : null; - } - assertThat(first, is(nullValue())); - assertThat(attribute, is(nullValue())); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 114aed68761fe..964039268e30d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -4769,12 +4769,24 @@ public void testPushTopNInlineDistanceToSource() { var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); - assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "$$order_by$0$0")); + // Depending on what is run before this test, the synthetic name could have variable suffixes, so we must only assert on the prefix + assertThat( + names(project.projections()), + contains( + equalTo("abbrev"), + equalTo("name"), + equalTo("location"), + equalTo("country"), + equalTo("city"), + startsWith("$$order_by$0$") + ) + ); var extract = as(project.child(), FieldExtractExec.class); assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city")); var evalExec = as(extract.child(), EvalExec.class); var alias = as(evalExec.fields().get(0), Alias.class); - assertThat(alias.name(), is("$$order_by$0$0")); + assertThat(alias.name(), startsWith("$$order_by$0$")); + var aliasName = alias.name(); // We need this name to know what to assert on later when comparing the Order to the Sort var stDistance = as(alias.child(), StDistance.class); assertThat(stDistance.left().toString(), startsWith("location")); extract = as(evalExec.child(), FieldExtractExec.class); @@ -4784,7 +4796,7 @@ public void testPushTopNInlineDistanceToSource() { // Assert that the TopN(distance) is pushed down as geo-sort(location) assertThat(source.limit(), is(topN.limit())); Set orderSet = orderAsSet(topN.order()); - Set sortsSet = sortsAsSet(source.sorts(), Map.of("location", "$$order_by$0$0")); + Set sortsSet = sortsAsSet(source.sorts(), Map.of("location", aliasName)); assertThat(orderSet, is(sortsSet)); // Fine-grained checks on the pushed down sort diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java index 1f52795dbacd7..5989c0de6b61d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java @@ -80,20 +80,67 @@ public void testManyTypeConflicts() throws IOException { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. 
*/ public void testManyTypeConflictsWithParent() throws IOException { - testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774214)); + testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774192)); /* * History: * 2 gb+ - start * 43.3mb - Cache attribute subclasses #111447 * 5.6mb - shorten error messages for UnsupportedAttributes #111973 * 3.1mb - cache EsFields #112008 - * 2.6mb - string serialization #112929 + * 2774214b - string serialization #112929 + * 2774192b - remove field attribute #112881 */ } + private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { + EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); + testSerializePlanWithIndex(index, expected); + } + + /** + * Test the size of serializing a plan like + * FROM index | LIMIT 10 + * with a single root field that has many children, grandchildren etc. + */ + public void testDeeplyNestedFields() throws IOException { + ByteSizeValue expected = ByteSizeValue.ofBytes(47252411); + /* + * History: + * 48223371b - string serialization #112929 + * 47252411b - remove field attribute #112881 + */ + + int depth = 6; + int childrenPerLevel = 8; + + EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); + testSerializePlanWithIndex(index, expected); + } + /** - * Test the size of serializing a plan with many conflicts. Callers of - * this method intentionally use a very precise size for the serialized + * Test the size of serializing a plan like + * FROM index | LIMIT 10 | KEEP one_single_field + * with a single root field that has many children, grandchildren etc. + */ + public void testDeeplyNestedFieldsKeepOnlyOne() throws IOException { + ByteSizeValue expected = ByteSizeValue.ofBytes(9425806); + /* + * History: + * 9426058b - string serialization #112929 + * 9425806b - remove field attribute #112881 + */ + + int depth = 6; + int childrenPerLevel = 9; + + EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); + testSerializePlanWithIndex(index, expected, false); + } + + /** + * Test the size of serializing the physical plan that will be sent to a data node. + * The plan corresponds to `FROM index | LIMIT 10`. + * Callers of this method intentionally use a very precise size for the serialized * data so a programmer making changes has to think when this size changes. *

* In general, shrinking the over the wire size is great and the precise @@ -108,10 +155,14 @@ public void testManyTypeConflictsWithParent() throws IOException { * ESQL impossible to use at all for big mappings with many conflicts. *

*/ - private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { - EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); - List attributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); - EsRelation relation = new EsRelation(randomSource(), index, attributes, IndexMode.STANDARD); + private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected) throws IOException { + testSerializePlanWithIndex(index, expected, true); + } + + private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected, boolean keepAllFields) throws IOException { + List allAttributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); + List keepAttributes = keepAllFields ? allAttributes : List.of(allAttributes.get(0)); + EsRelation relation = new EsRelation(randomSource(), index, keepAttributes, IndexMode.STANDARD); Limit limit = new Limit(randomSource(), new Literal(randomSource(), 10, DataType.INTEGER), relation); Project project = new Project(randomSource(), limit, limit.output()); FragmentExec fragmentExec = new FragmentExec(project); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 33a97f1e91621..260d4e663dafd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -397,6 +397,7 @@ public void updateModelTransaction(Model newModel, Model existingModel, ActionLi logger.error( format("Failed to update inference endpoint [%s] due to [%s]", inferenceEntityId, configResponse.buildFailureMessage()) ); + preventDeletionLock.remove(inferenceEntityId); // Since none of our updates succeeded at this point, we can simply return. 
finalListener.onFailure( new ElasticsearchStatusException( diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml index 9a4d7f4416164..2980d42d22c3e 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml @@ -1,4 +1,8 @@ setup: + - skip: + features: + - close_to + - contains - requires: cluster_features: "text_similarity_reranker_retriever_supported" reason: semantic reranking introduced in 8.15.0 @@ -206,8 +210,8 @@ setup: size: 10 explain: true - - match: { hits.hits.0._id: "doc_2" } - - match: { hits.hits.1._id: "doc_1" } + - contains: { hits.hits: { _id: "doc_2" } } + - contains: { hits.hits: { _id: "doc_1" } } - close_to: { hits.hits.0._explanation.value: { value: 0.4, error: 0.000001 } } - match: {hits.hits.0._explanation.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java similarity index 97% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index ab78f48b6cddf..c5ccee1d36b72 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java similarity index 90% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java index 123ca3b806153..40aab696dc9c4 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.RestClient; import org.elasticsearch.index.IndexMode; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeEnabledRestTestIT.java similarity index 96% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeEnabledRestTestIT.java index a024a2c0f303c..63094852c3626 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeEnabledRestTestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java similarity index 92% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java index 22ac2b6d7d239..dbee5d1d2de8c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java similarity index 96% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java index 6464b4e966823..60c7d07115ef2 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java similarity index 89% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java index d07e29c6b6b31..c03e8aea9c2ac 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.Mapper; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java similarity index 84% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java index e1cafc40f706f..0329f7723a108 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java similarity index 84% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java index dd80917b5f080..1c425cf30907b 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/ReindexChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java similarity index 76% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/ReindexChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java index b48dce9ca4c57..83344b688ff8c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/ReindexChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java similarity index 97% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 4c896e1f262b2..dd7806fc9c8fa 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -18,8 +16,6 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; -import org.elasticsearch.datastreams.logsdb.qa.matchers.Matcher; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -31,6 +27,8 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; +import org.elasticsearch.xpack.logsdb.qa.matchers.Matcher; import org.hamcrest.Matchers; import java.io.IOException; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java similarity index 82% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java index 6a20626634499..3b141908f45b1 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java +++ 
b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java similarity index 63% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java index 6b0e4d4d0b34d..c1f97823b963a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java similarity index 74% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java index d6cfebed1445a..5adf44f10be45 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java similarity index 83% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java index b8f68a3c17494..b98ad65ac4d4f 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; @@ -15,8 +13,8 @@ import java.util.Arrays; import java.util.List; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintArrays; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintArrays; class ArrayEqualMatcher extends GenericEqualsMatcher { ArrayEqualMatcher( diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/GenericEqualsMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java similarity index 85% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/GenericEqualsMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java index 3edc9aeec6da8..933c7eb86f65a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/GenericEqualsMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java @@ -1,20 +1,18 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.util.List; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; public class GenericEqualsMatcher extends Matcher { protected final XContentBuilder actualMappings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java similarity index 83% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java index 00e22e9714283..447aa21b932c2 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java @@ -1,21 +1,19 @@ /* * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.util.List; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; public class ListEqualMatcher extends GenericEqualsMatcher> { public ListEqualMatcher( diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/MatchResult.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java similarity index 73% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/MatchResult.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java index 4334209bcfa30..a890a0375ef03 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/MatchResult.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import java.util.Objects; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Matcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java similarity index 90% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Matcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java index 313bf3b3d2392..e08e401c19530 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Matcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java @@ -1,17 +1,15 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.logsdb.qa.matchers.source.SourceMatcher; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.source.SourceMatcher; import java.util.List; import java.util.Map; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Messages.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java similarity index 80% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Messages.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java index ddd8212c093eb..122e3b2d6261c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Messages.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ObjectMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java similarity index 68% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ObjectMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java index d071419f5aa6a..f2f08b1dfac14 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ObjectMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java @@ -1,18 +1,16 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; public class ObjectMatcher extends GenericEqualsMatcher { ObjectMatcher( diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/DynamicFieldMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java similarity index 80% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/DynamicFieldMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java index 5d7ab12f06d7b..d6812c41f7611 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/DynamicFieldMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java @@ -1,17 +1,15 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.util.List; import java.util.Objects; @@ -20,8 +18,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; class DynamicFieldMatcher { private final XContentBuilder actualMappings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/FieldSpecificMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java similarity index 92% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/FieldSpecificMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java index 96423125410a7..0c970f1b5fd9a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/FieldSpecificMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java @@ -1,18 +1,16 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.math.BigInteger; import java.util.List; @@ -22,8 +20,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; interface FieldSpecificMatcher { MatchResult match(List actual, List expected, Map actualMapping, Map expectedMapping); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/MappingTransforms.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java similarity index 88% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/MappingTransforms.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java index ef5775eec3703..dbe73e3c2a4c2 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/MappingTransforms.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import java.util.ArrayList; import java.util.HashMap; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java similarity index 90% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java index 0d12fabe081fa..cd2bb361d065d 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java @@ -1,29 +1,27 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.datastreams.logsdb.qa.matchers.GenericEqualsMatcher; -import org.elasticsearch.datastreams.logsdb.qa.matchers.ListEqualMatcher; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.GenericEqualsMatcher; +import org.elasticsearch.xpack.logsdb.qa.matchers.ListEqualMatcher; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; public class SourceMatcher extends GenericEqualsMatcher>> { private final Map actualNormalizedMapping; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceTransforms.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java similarity index 86% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceTransforms.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java index 23e3f090cafc3..c21383d411212 100644 
--- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceTransforms.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import java.util.ArrayList; import java.util.Collections; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml index 3f2bca2e4bcd9..2f320c2cad966 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml @@ -291,7 +291,7 @@ create logsdb data stream with custom sorting without host.name: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -341,7 +341,7 @@ create logsdb data stream with custom sorting and host object: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-nginx-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -400,7 +400,7 @@ create logsdb data stream with custom sorting and dynamically mapped host.name: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-kafka-qa] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -465,7 +465,7 @@ create logsdb data stream with custom sorting and host.name object: - do: 
allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-nginx-qa] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -521,7 +521,7 @@ create logsdb data stream with default sorting on malformed host.name: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-win-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -585,7 +585,7 @@ create logsdb data stream with custom sorting and host.name date field: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -638,7 +638,7 @@ create logsdb data stream with custom sorting and missing host.name field mappin - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-qa] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -690,7 +690,7 @@ create logsdb data stream with custom sorting and host.name field without doc va - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-dev] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -732,7 +732,7 @@ create logsdb data stream with incompatible ignore_above on host.name: - do: allowed_warnings: - - "index template [logsdb-index-template-ignore-above] has index patterns [logsdb-ignore-above] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-ignore-above] has index patterns [logsdb-ignore-above] matching patterns from existing older templates [global] with patterns 
(global => [*]); this template [logsdb-index-template-ignore-above] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-ignore-above body: @@ -780,7 +780,7 @@ create logsdb data stream with no sorting and host.name as text: - do: allowed_warnings: - - "index template [logsdb-index-template-non-keyword] has index patterns [logsdb-non-keyword] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-non-keyword] has index patterns [logsdb-non-keyword] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-non-keyword] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-non-keyword body: @@ -814,7 +814,7 @@ create logsdb data stream without index sorting and ignore_above on host.name: - do: allowed_warnings: - - "index template [logsdb-index-template-ignore-above-override] has index patterns [logsdb-ignore-above-override] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-ignore-above-override] has index patterns [logsdb-ignore-above-override] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-ignore-above-override] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-ignore-above-override body: @@ -860,7 +860,7 @@ create logsdb data stream with host.name as alias and sorting on it: - do: allowed_warnings: - - "index template [logsdb-index-template-alias] has index patterns [logsdb-alias] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-alias] has index patterns [logsdb-alias] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-alias] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-alias body: @@ -898,7 +898,7 @@ create logsdb data stream with multi-fields on host.name: - do: allowed_warnings: - - "index template [logsdb-index-template-multi-fields] has index patterns [logsdb-multi-fields] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-multi-fields] has index patterns [logsdb-multi-fields] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-multi-fields] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-multi-fields body: @@ -945,7 +945,7 @@ create logsdb data stream with multi-fields on host.name and no sorting: - do: allowed_warnings: - - "index template [ logsdb-no-sort-multi-fields-template ] has index patterns [logsdb-no-sort-multi-fields] matching patterns from existing older templates [global]" + - "index template [logsdb-no-sort-multi-fields-template] has index patterns [logsdb-no-sort-multi-fields] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-no-sort-multi-fields-template] will take precedence during new index creation" indices.put_index_template: name: logsdb-no-sort-multi-fields-template body: @@ -980,7 +980,7 @@ create logsdb data stream with custom empty sorting: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from 
existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-empty] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -1027,7 +1027,7 @@ create logsdb data stream with custom sorting on timestamp: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-dev] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java index 18d974473251b..fee3d729f8dfd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java @@ -30,6 +30,10 @@ import static org.elasticsearch.core.Strings.format; +/** + * This handles ML autoscaling just for classic cloud. + * For serverless, see: {@link MlAutoscalingResourceTracker}. + */ public final class MlAutoscalingDeciderService implements AutoscalingDeciderService, LocalNodeMasterListener { private static final Logger logger = LogManager.getLogger(MlAutoscalingDeciderService.class); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java index 9a9fbfa0340a9..6f14e2649a394 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java @@ -48,7 +48,8 @@ import static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; /** - * backend for new kubernetes based autoscaler. + * This handles ML autoscaling just for serverless. + * For classic cloud, see: {@link MlAutoscalingDeciderService}. 
  */
 public final class MlAutoscalingResourceTracker {
     private static final Logger logger = LogManager.getLogger(MlAutoscalingResourceTracker.class);
@@ -242,72 +243,72 @@ static void getMemoryAndProcessors(
             final int numMissingProcessors = numMissingAllocations * numberOfThreadsPerAllocation;
             int numExistingProcessorsToBeUsed = Math.min(numMissingProcessors, numberOfAvailableProcessors);
 
+            if (numberOfRequestedAllocations == 0) {
+                continue;
+            }
+
             if (assignment.getNodeRoutingTable().isEmpty() == false
                 && assignment.getNodeRoutingTable().values().stream().allMatch(r -> r.getState().consumesMemory() == false)) {
                 // Ignore states that don't consume memory, for example all allocations are failed or stopped
                 // if the node routing table is empty, then it will match the above condition, but it needs to be handled in the next branch
                 continue;
+            }
+
+            if (assignment.getNodeRoutingTable().isEmpty() == false) {
+                // if the routing table is non-empty, this is an existing model
+                existingModelMemoryBytes += estimatedMemoryUsage;
             } else {
+                // only increase memory requirements for new models
+                extraPerNodeModelMemoryBytes += Math.max(extraPerNodeModelMemoryBytes, estimatedMemoryUsage);
+                extraModelMemoryInBytes += estimatedMemoryUsage;
+            }
-                if (assignment.getNodeRoutingTable().isEmpty() == false) {
-                    // if the routing table is non-empty, this is an existing model
-                    existingModelMemoryBytes += estimatedMemoryUsage;
-                } else {
-                    // only increase memory requirements for new models
-                    extraPerNodeModelMemoryBytes += Math.max(extraPerNodeModelMemoryBytes, estimatedMemoryUsage);
-                    extraModelMemoryInBytes += estimatedMemoryUsage;
+            // if not low priority, check processor requirements.
+            if (Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) == false) {
+                if (numMissingProcessors > numberOfAvailableProcessors) {
+                    // as assignments can be placed on different nodes, we only need numberOfThreadsPerAllocation here
+                    extraProcessors += numMissingProcessors - numExistingProcessorsToBeUsed;
+                    extraPerNodeProcessors = Math.max(extraPerNodeProcessors, 1); // if extra processors >0, we need at least 1
+                    // extraPerNodeProcessors
                 }
-
-                // if not low priority, check processor requirements.
-                if (Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) == false) {
-                    if (numMissingProcessors > numberOfAvailableProcessors) {
-                        // as assignments can be placed on different nodes, we only need numberOfThreadsPerAllocation here
-                        extraProcessors += numMissingProcessors - numExistingProcessorsToBeUsed;
-                        extraPerNodeProcessors = Math.max(extraPerNodeProcessors, 1); // if extra processors >0, we need at least 1
-                        // extraPerNodeProcessors
-                    }
-                    if (perNodeAvailableProcessors < numberOfThreadsPerAllocation) {
-                        extraPerNodeProcessors = Math.max(extraPerNodeProcessors, numberOfThreadsPerAllocation);
-                    }
-                    numberOfAvailableProcessors -= numExistingProcessorsToBeUsed;
+                if (perNodeAvailableProcessors < numberOfThreadsPerAllocation) {
+                    extraPerNodeProcessors = Math.max(extraPerNodeProcessors, numberOfThreadsPerAllocation);
                 }
+                numberOfAvailableProcessors -= numExistingProcessorsToBeUsed;
+            }
+
+            if (extraProcessors > 0 || extraPerNodeProcessors > 0 || extraModelMemoryInBytes > 0 || extraPerNodeModelMemoryBytes > 0) {
+                logger.info(
+                    () -> format(
+                        "trained model [%s] assigned to [%s], waiting for [%d] allocations to start due to missing hardware",
+                        modelAssignment.getKey(),
+                        Strings.arrayToCommaDelimitedString(modelAssignment.getValue().getStartedNodes()),
+                        numMissingAllocations
+                    )
+                );
+            }
-                if (extraProcessors > 0 || extraPerNodeProcessors > 0 || extraModelMemoryInBytes > 0 || extraPerNodeModelMemoryBytes > 0) {
-                    logger.info(
-                        () -> format(
-                            "trained model [%s] assigned to [%s], waiting for [%d] allocations to start due to missing hardware",
-                            modelAssignment.getKey(),
-                            Strings.arrayToCommaDelimitedString(modelAssignment.getValue().getStartedNodes()),
-                            numMissingAllocations
+            for (String node : modelAssignment.getValue().getNodeRoutingTable().keySet()) {
+                sumOfCurrentlyExistingAndUsedProcessors += modelAssignment.getValue().getNodeRoutingTable().get(node).getTargetAllocations()
+                    * numberOfThreadsPerAllocation;
+
+                jobRequirementsByNode.computeIfAbsent(node, k -> new ArrayList<>())
+                    .add(
+                        MlJobRequirements.of(
+                            estimatedMemoryUsage,
+                            Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority())
+                                ? 0
+                                : modelAssignment.getValue().getNodeRoutingTable().get(node).getTargetAllocations()
+                                    * numberOfThreadsPerAllocation
                         )
                     );
-                }
-
-                for (String node : modelAssignment.getValue().getNodeRoutingTable().keySet()) {
-                    sumOfCurrentlyExistingAndUsedProcessors += modelAssignment.getValue()
-                        .getNodeRoutingTable()
-                        .get(node)
-                        .getTargetAllocations() * numberOfThreadsPerAllocation;
-
-                    jobRequirementsByNode.computeIfAbsent(node, k -> new ArrayList<>())
-                        .add(
-                            MlJobRequirements.of(
-                                estimatedMemoryUsage,
-                                Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority())
-                                    ? 0
-                                    : modelAssignment.getValue().getNodeRoutingTable().get(node).getTargetAllocations()
-                                        * numberOfThreadsPerAllocation
-                            )
-                        );
-                }
-
-                // min(3, max(number of allocations over all deployed models)
-                // the minimum number of nodes is equal to the number of allocations, up to 3
-                // if the number of allocations is greater than 3, then wantedMinNodes is still 3
-                // in theory this should help availability for 2-3 allocations
-                // the planner should split over all available nodes
-                minNodes = Math.min(3, Math.max(minNodes, numberOfRequestedAllocations));
             }
+
+            // min(3, max(number of allocations over all deployed models)
+            // the minimum number of nodes is equal to the number of allocations, up to 3
+            // if the number of allocations is greater than 3, then wantedMinNodes is still 3
+            // in theory this should help availability for 2-3 allocations
+            // the planner should split over all available nodes
+            minNodes = Math.min(3, Math.max(minNodes, numberOfRequestedAllocations));
         }
 
         // dummy autoscaling entity
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java
index 3674dda3934bd..729bb708cc46f 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
 import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction;
 import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats;
+import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings;
 import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState;
 import org.elasticsearch.xpack.core.ml.inference.assignment.Priority;
 import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo;
@@ -1800,6 +1801,172 @@ public void testGetMemoryAndProcessorsScaleDownNotPreventedByDummyEntityAsMemory
         );
     }
 
+    public void testGetMemoryAndProcessorsScaleDownForModelWithZeroAllocations() throws InterruptedException {
+        long memory = 1000000000;
+        Map<String, String> nodeAttr = Map.of(
+            MachineLearning.MACHINE_MEMORY_NODE_ATTR,
+            Long.toString(memory),
+            MachineLearning.MAX_JVM_SIZE_NODE_ATTR,
+            "400000000",
+            MachineLearning.ML_CONFIG_VERSION_NODE_ATTR,
+            "7.2.0",
+            MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR,
+            "2.0"
+        );
+
+        MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext(
+            List.of(),
+            List.of(),
+            List.of(),
+            Map.of(
+                "model-with-zero-allocations",
+                TrainedModelAssignment.Builder.empty(
+                    new StartTrainedModelDeploymentAction.TaskParams(
+                        "model-with-zero-allocations",
+                        "model-with-zero-allocations-deployment",
+                        400,
+                        0,
+                        2,
+                        100,
+                        null,
+                        Priority.NORMAL,
+                        0L,
+                        0L
+                    ),
+                    new AdaptiveAllocationsSettings(true, 0, 4)
+                ).build()
+            ),
+            List.of(
+                DiscoveryNodeUtils.builder("ml-node-1")
+                    .name("ml-node-name-1")
+                    .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300))
+                    .attributes(nodeAttr)
+                    .roles(Set.of(DiscoveryNodeRole.ML_ROLE))
+                    .build()
+            ),
+            PersistentTasksCustomMetadata.builder().build()
+        );
+        MlMemoryTracker mockTracker = mock(MlMemoryTracker.class);
+
+        this.assertAsync(
+            listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors(
+                mlAutoscalingContext,
+                mockTracker,
+                Map.of("ml-node-1", memory),
+                600000000,
+                2,
+                MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE,
+                MlDummyAutoscalingEntity.of(0, 0),
+                1,
+                listener
+            ),
+            stats -> {
+                assertEquals(memory, stats.currentPerNodeMemoryBytes());
+                assertEquals(0, stats.currentTotalModelMemoryBytes());
+                assertEquals(0, stats.currentTotalProcessorsInUse());
+                assertEquals(1, stats.currentTotalNodes());
+                assertEquals(0, stats.wantedMinNodes());
+                assertEquals(0, stats.wantedExtraPerNodeNodeProcessors());
+                assertEquals(0, stats.wantedExtraProcessors());
+                assertEquals(0, stats.wantedExtraModelMemoryBytes());
+                assertEquals(0, stats.wantedExtraPerNodeMemoryBytes());
+                assertEquals(memory, stats.unwantedNodeMemoryBytesToRemove());
+                assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes());
+            }
+        );
+    }
+
+    public void testGetMemoryAndProcessorsIgnoreThreadsOfModelWithZeroAllocations() throws InterruptedException {
+        long memory = 1000000000;
+        Map<String, String> nodeAttr = Map.of(
+            MachineLearning.MACHINE_MEMORY_NODE_ATTR,
+            Long.toString(memory),
+            MachineLearning.MAX_JVM_SIZE_NODE_ATTR,
+            "400000000",
+            MachineLearning.ML_CONFIG_VERSION_NODE_ATTR,
+            "7.2.0",
+            MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR,
+            "2.0"
+        );
+
+        MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext(
+            List.of(),
+            List.of(),
+            List.of(),
+            Map.of(
+                "model-with-one-allocation",
+                TrainedModelAssignment.Builder.empty(
+                    new StartTrainedModelDeploymentAction.TaskParams(
+                        "model-with-one-allocation",
+                        "model-with-one-allocation-deployment",
+                        400,
+                        1,
+                        2,
+                        100,
+                        null,
+                        Priority.NORMAL,
+                        0L,
+                        0L
+                    ),
+                    null
+                ).addRoutingEntry("ml-node-1", new RoutingInfo(1, 1, RoutingState.STARTED, "")).build(),
+                "model-with-zero-allocations",
+                TrainedModelAssignment.Builder.empty(
+                    new StartTrainedModelDeploymentAction.TaskParams(
+                        "model-with-zero-allocations",
+                        "model-with-zero-allocations-deployment",
+                        400,
+                        0,
+                        4,
+                        100,
+                        null,
+                        Priority.NORMAL,
+                        0L,
+                        0L
+                    ),
+                    new AdaptiveAllocationsSettings(true, 0, 4)
+                ).build()
+            ),
+            List.of(
+                DiscoveryNodeUtils.builder("ml-node-1")
+                    .name("ml-node-name-1")
+                    .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300))
+                    .attributes(nodeAttr)
+                    .roles(Set.of(DiscoveryNodeRole.ML_ROLE))
+                    .build()
+            ),
+            PersistentTasksCustomMetadata.builder().build()
+        );
+        MlMemoryTracker mockTracker = mock(MlMemoryTracker.class);
+
+        this.assertAsync(
+            listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors(
+                mlAutoscalingContext,
+                mockTracker,
+                Map.of("ml-node-1", memory),
+                600000000,
+                2,
+                MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE,
+                MlDummyAutoscalingEntity.of(0, 0),
+                1,
+                listener
+            ),
+            stats -> {
+                assertEquals(memory, stats.currentPerNodeMemoryBytes());
+                assertEquals(251659040, stats.currentTotalModelMemoryBytes());
+                assertEquals(2, stats.currentTotalProcessorsInUse());
+                assertEquals(1, stats.currentTotalNodes());
+                assertEquals(1, stats.wantedMinNodes());
+                assertEquals(0, stats.wantedExtraPerNodeNodeProcessors());
+                assertEquals(0, stats.wantedExtraProcessors());
+                assertEquals(0, stats.wantedExtraModelMemoryBytes());
+                assertEquals(0, stats.wantedExtraPerNodeMemoryBytes());
+                assertEquals(0, stats.unwantedNodeMemoryBytesToRemove());
+                assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes());
+            }
+        );
+    }
+
     private <T> void assertAsync(Consumer<ActionListener<T>> function, Consumer<T> furtherTests) throws InterruptedException {
         CountDownLatch latch = new CountDownLatch(1);
         AtomicBoolean listenerCalled = new AtomicBoolean(false);
diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml
index 105efcec8bc65..d9db1fe387625 100644
--- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml
+++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml
@@ -1,4 +1,9 @@
 setup:
+  - skip:
+      features:
+        - close_to
+        - contains
+
   - requires:
       cluster_features: ['rrf_retriever_composition_supported', 'text_similarity_reranker_retriever_supported']
       reason: need to have support for rrf and semantic reranking composition
@@ -125,9 +130,6 @@ setup:
                       term: {
                         topic: "science"
                       }
-                    },
-                    "sort": {
-                      "integer": "asc"
                     }
                   }
                 },
@@ -152,8 +154,8 @@ setup:
   - match: { hits.total.value: 3 }
   - length: { hits.hits: 2 }
 
-  - match: { hits.hits.0._id: "doc_1" }
-  - match: { hits.hits.1._id: "doc_3" }
+  - contains: { hits.hits: { _id: "doc_1" } }
+  - contains: { hits.hits: { _id: "doc_3" } }
 
   - match: { aggregations.topics.buckets.0.key: "science" }
   - match: { aggregations.topics.buckets.0.doc_count: 2 }
@@ -304,11 +306,8 @@ setup:
                     standard: {
                       query: {
                         term: {
-                          topic: "science"
+                          subtopic: "astronomy"
                         }
-                      },
-                      "sort": {
-                        "integer": "asc"
                       }
                     }
                   },
@@ -327,14 +326,13 @@ setup:
 
   - match: { hits.hits.0._id: "doc_2" }
   - match: { hits.hits.1._id: "doc_1" }
-  - match: { hits.hits.2._id: "doc_3" }
 
-  - close_to: { hits.hits.0._explanation.value: { value: 0.6666667, error: 0.000001 } }
-  - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.6666667\\].*/" }
+  - close_to: { hits.hits.0._explanation.value: { value: 0.833333, error: 0.0001 } }
+  - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" }
   - match: {hits.hits.0._explanation.details.0.value: 2}
   - match: {hits.hits.0._explanation.details.0.description: "/rrf.score:.\\[0.33333334\\].*/" }
   - match: {hits.hits.0._explanation.details.0.details.0.details.0.description: "/ConstantScore.*/" }
-  - match: {hits.hits.0._explanation.details.1.value: 2}
-  - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.33333334\\].*/" }
+  - match: {hits.hits.0._explanation.details.1.value: 1}
+  - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.5\\].*/" }
   - match: {hits.hits.0._explanation.details.1.details.0.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" }
-  - match: {hits.hits.0._explanation.details.1.details.0.details.0.description: "/weight.*science.*/" }
+  - match: {hits.hits.0._explanation.details.1.details.0.details.0.description: "/weight.*astronomy.*/" }