diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 3271007a00077..00af4d006d0ac 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.17", "8.11.4", "8.12.0", "8.13.0"] + BWC_VERSION: ["7.17.18", "8.12.1", "8.13.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 66eb1fc79e3ca..30d4f4486dad5 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1105,6 +1105,22 @@ steps: env: BWC_VERSION: 7.17.17 + - label: "{{matrix.image}} / 7.17.18 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.18 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.18 + - label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0 timeout_in_minutes: 300 @@ -1809,6 +1825,22 @@ steps: env: BWC_VERSION: 8.12.0 + - label: "{{matrix.image}} / 8.12.1 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.12.1 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.12.1 + - label: "{{matrix.image}} / 8.13.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.0 timeout_in_minutes: 
300 diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index faf904f2f8b04..0240fd03f4a89 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -89,6 +89,7 @@ steps: setup: image: - amazonlinux-2023 + - amazonlinux-2 agents: provider: aws imagePrefix: elasticsearch-{{matrix.image}} diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 40bc6277379f5..a92e190be7963 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -2,7 +2,7 @@ steps: - group: bwc steps: $BWC_STEPS - label: concurrent-search-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 agents: provider: gcp @@ -97,7 +97,7 @@ steps: diskSizeGb: 350 machineType: custom-32-98304 - label: single-processor-node-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true functionalTests timeout_in_minutes: 420 agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 07bc1de57b752..44007272f8954 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -672,6 +672,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.17.17 + - label: 7.17.18 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.18#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + 
machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.18 - label: 8.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.0#bwcTest timeout_in_minutes: 300 @@ -1112,6 +1122,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.0 + - label: 8.12.1 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.12.1#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.12.1 - label: 8.13.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.0#bwcTest timeout_in_minutes: 300 @@ -1123,7 +1143,7 @@ steps: env: BWC_VERSION: 8.13.0 - label: concurrent-search-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 agents: provider: gcp @@ -1218,7 +1238,7 @@ steps: diskSizeGb: 350 machineType: custom-32-98304 - label: single-processor-node-tests - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true check + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true functionalTests timeout_in_minutes: 420 agents: provider: gcp diff --git a/.buildkite/scripts/encryption-at-rest.sh b/.buildkite/scripts/encryption-at-rest.sh index 6aa0a3b174fe0..a003107cb00f6 100755 --- a/.buildkite/scripts/encryption-at-rest.sh +++ b/.buildkite/scripts/encryption-at-rest.sh @@ -22,4 +22,4 @@ touch .output.log rm -Rf "$WORKSPACE" ln -s "$PWD" "$WORKSPACE" -.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true check \ No newline at end of file +.ci/scripts/run-gradle.sh 
-Dbwc.checkout.align=true functionalTests diff --git a/.buildkite/scripts/fixture-deploy.sh b/.buildkite/scripts/fixture-deploy.sh new file mode 100755 index 0000000000000..3c30b3a3176d2 --- /dev/null +++ b/.buildkite/scripts/fixture-deploy.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +set -euo pipefail + +echo "$DOCKER_REGISTRY_PASSWORD" | docker login -u "$DOCKER_REGISTRY_USERNAME" --password-stdin docker.elastic.co +unset DOCKER_REGISTRY_USERNAME DOCKER_REGISTRY_PASSWORD + +docker buildx create --use +.ci/scripts/run-gradle.sh deployFixtureDockerImages diff --git a/.buildkite/scripts/release-tests.sh b/.buildkite/scripts/release-tests.sh index aa5c50d2e87c1..e4185c642f244 100755 --- a/.buildkite/scripts/release-tests.sh +++ b/.buildkite/scripts/release-tests.sh @@ -20,4 +20,4 @@ curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/m curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}.zip" https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT.zip .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false -Dbuild.ml_cpp.repo=file://${ML_IVY_REPO} \ - -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef build \ No newline at end of file + -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef assemble functionalTests diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 569caf22ae830..3871c6d06fd23 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -66,6 +66,7 @@ BWC_VERSION: - "7.17.15" - "7.17.16" - "7.17.17" + - "7.17.18" - "8.0.0" - "8.0.1" - "8.1.0" @@ -110,4 +111,5 @@ BWC_VERSION: - "8.11.3" - "8.11.4" - "8.12.0" + - "8.12.1" - "8.13.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 98bfd6b50d24b..36c0eb5a2999c 100644 --- 
a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,4 @@ BWC_VERSION: - - "7.17.17" - - "8.11.4" - - "8.12.0" + - "7.17.18" + - "8.12.1" - "8.13.0" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 64ad5c5c851e3..91a5039f5c5f7 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,18 +3,18 @@ # For more info, see https://help.github.com/articles/about-codeowners/ # Stack Monitoring index templates -x-pack/plugin/core/template-resources/src/main/resources/monitoring-alerts-7.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-es.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash.json @elastic/infra-monitoring-ui -x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json @elastic/infra-monitoring-ui -x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-alerts-7.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json 
@elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-es.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash-mb.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash.json @elastic/stack-monitoring +x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json @elastic/stack-monitoring +x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @elastic/stack-monitoring # Fleet x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet @elastic/fleet diff --git a/TRACING.md b/TRACING.md index 6221305ef455b..82f9b0f52fd8b 100644 --- a/TRACING.md +++ b/TRACING.md @@ -24,7 +24,7 @@ In your `elasticsearch.yml` add the following configuration: ``` tracing.apm.enabled: true -tracing.apm.agent.server_url: https://:443 +telemetry.agent.server_url: https://:443 ``` When using a secret token to authenticate with the APM server, you must add it to the Elasticsearch keystore under `tracing.apm.secret_token`. For example, execute: @@ -34,7 +34,7 @@ When using a secret token to authenticate with the APM server, you must add it t then enter the token when prompted. If you are using API keys, change the keystore key name to `tracing.apm.api_key`. All APM settings live under `tracing.apm`. 
All settings related to the Java agent -go under `tracing.apm.agent`. Anything you set under there will be propagated to +go under `telemetry.agent`. Anything you set under there will be propagated to the agent. For agent settings that can be changed dynamically, you can use the cluster @@ -43,7 +43,7 @@ settings REST API. For example, to change the sampling rate: curl -XPUT \ -H "Content-type: application/json" \ -u "$USERNAME:$PASSWORD" \ - -d '{ "persistent": { "tracing.apm.agent.transaction_sample_rate": "0.75" } }' \ + -d '{ "persistent": { "telemetry.agent.transaction_sample_rate": "0.75" } }' \ https://localhost:9200/_cluster/settings diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java index e0281dbb856d4..49603043e7bcc 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java @@ -720,7 +720,7 @@ private static long computeBytesRefCheckSum(BytesRefBlock block, int[] traversal } private static long computeDoubleCheckSum(DoubleBlock block, int[] traversalOrder) { - double sum = 0; + long sum = 0; for (int position : traversalOrder) { if (block.isNull(position)) { @@ -729,11 +729,12 @@ private static long computeDoubleCheckSum(DoubleBlock block, int[] traversalOrde int start = block.getFirstValueIndex(position); int end = start + block.getValueCount(position); for (int i = start; i < end; i++) { - sum += block.getDouble(i); + // Use an operation that is not affected by rounding errors. Otherwise, the result may depend on the traversalOrder. 
+ sum += (long) block.getDouble(i); } } - return (long) sum; + return sum; } private static long computeIntCheckSum(IntBlock block, int[] traversalOrder) { diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java index dc517f257537a..4bb33937579c2 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java @@ -189,6 +189,11 @@ public String indexName() { return "benchmark"; } + @Override + public MappedFieldType.FieldExtractPreference fieldExtractPreference() { + return MappedFieldType.FieldExtractPreference.NONE; + } + @Override public SearchLookup lookup() { throw new UnsupportedOperationException(); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/ShardsAvailabilityHealthIndicatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/ShardsAvailabilityHealthIndicatorBenchmark.java index ef834fad424e3..8c5de05a01648 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/ShardsAvailabilityHealthIndicatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/ShardsAvailabilityHealthIndicatorBenchmark.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -166,7 +167,7 @@ public void setUp() throws Exception { .build(); Settings settings = Settings.builder().put("node.name", 
ShardsAvailabilityHealthIndicatorBenchmark.class.getSimpleName()).build(); - ThreadPool threadPool = new ThreadPool(settings); + ThreadPool threadPool = new ThreadPool(settings, MeterRegistry.NOOP); ClusterService clusterService = new ClusterService( Settings.EMPTY, diff --git a/branches.json b/branches.json index b33bb30e77cc4..289928f13daf7 100644 --- a/branches.json +++ b/branches.json @@ -7,9 +7,6 @@ { "branch": "8.12" }, - { - "branch": "8.11" - }, { "branch": "7.17" } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationPrecommitPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationPrecommitPlugin.java index cf70cce6e166b..8bcb7f46475f9 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationPrecommitPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationPrecommitPlugin.java @@ -30,11 +30,11 @@ public TaskProvider createTask(Project project) { .register("validate" + publicationName + "Pom", PomValidationTask.class); validatePom.configure(t -> t.dependsOn(validateTask)); validateTask.configure(task -> { - GenerateMavenPom generateMavenPom = project.getTasks() + TaskProvider generateMavenPom = project.getTasks() .withType(GenerateMavenPom.class) - .getByName("generatePomFileFor" + publicationName + "Publication"); + .named("generatePomFileFor" + publicationName + "Publication"); task.dependsOn(generateMavenPom); - task.getPomFile().fileValue(generateMavenPom.getDestination()); + task.getPomFile().fileProvider(generateMavenPom.map(GenerateMavenPom::getDestination)); }); }); diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index a4e0e2389dbec..934d9f05d77a2 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -139,6 +139,10 @@ gradlePlugin { id = 
'elasticsearch.java-module' implementationClass = 'org.elasticsearch.gradle.internal.ElasticsearchJavaModulePathPlugin' } + mrjar { + id = 'elasticsearch.mrjar' + implementationClass = 'org.elasticsearch.gradle.internal.MrjarPlugin' + } releaseTools { id = 'elasticsearch.release-tools' implementationClass = 'org.elasticsearch.gradle.internal.release.ReleaseToolsPlugin' diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy index 144307912101c..237aa99e4b824 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy @@ -8,6 +8,8 @@ package org.elasticsearch.gradle.internal +import spock.lang.Ignore + import org.apache.commons.compress.archivers.tar.TarArchiveEntry import org.apache.commons.compress.archivers.tar.TarArchiveInputStream import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream @@ -36,6 +38,11 @@ class SymbolicLinkPreservingTarFuncTest extends AbstractGradleFuncTest { final Path linkToRealFolder = archiveSourceRoot.resolve("link-to-real-folder"); Files.createSymbolicLink(linkToRealFolder, Paths.get("./real-folder")); + final Path realFolder2 = testProjectDir.getRoot().toPath().resolve("real-folder2") + final Path realFolderSub = realFolder2.resolve("sub") + Files.createDirectory(realFolder2); + Files.createDirectory(realFolderSub); + buildFile << """ import org.elasticsearch.gradle.internal.SymbolicLinkPreservingTar @@ -56,6 +63,12 @@ tasks.register("buildBZip2Tar", SymbolicLinkPreservingTar) { SymbolicLinkPreserv tar.compression = Compression.BZIP2 tar.preserveFileTimestamps = ${preserverTimestamp} from fileTree("archiveRoot") + + into('config') { + 
dirMode 0750 + fileMode 0660 + from "real-folder2" + } } """ when: @@ -118,14 +131,20 @@ tasks.register("buildTar", SymbolicLinkPreservingTar) { SymbolicLinkPreservingTa if (entry.getName().equals("real-folder/")) { assert entry.isDirectory() realFolderEntry = true - } else if (entry.getName().equals("real-folder/file")) { + } else if (entry.getName().equals("real-folder/file")) { assert entry.isFile() fileEntry = true } else if (entry.getName().equals("real-folder/link-to-file")) { assert entry.isSymbolicLink() assert normalized(entry.getLinkName()) == "./file" linkToFileEntry = true - } else if (entry.getName().equals("link-in-folder/")) { + } else if (entry.getName().equals("config/")) { + assert entry.isDirectory() + assert entry.getMode() == 16877 + } else if (entry.getName().equals("config/sub/")) { + assert entry.isDirectory() + assert entry.getMode() == 16872 + }else if (entry.getName().equals("link-in-folder/")) { assert entry.isDirectory() linkInFolderEntry = true } else if (entry.getName().equals("link-in-folder/link-to-file")) { diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index aaae18401685a..f691d4bd996a7 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -79,7 +79,6 @@ if (BuildParams.inFipsJvm) { // with no x-pack. 
Tests having security explicitly enabled/disabled will override this setting setting 'xpack.security.enabled', 'false' setting 'xpack.security.fips_mode.enabled', 'true' - setting 'xpack.security.fips_mode.required_providers', '["BCFIPS", "BCJSSE"]' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.authc.password_hashing.algorithm', 'pbkdf2_stretch' keystorePassword 'keystore-password' diff --git a/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle b/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle index 7c7c05facb2e1..f85ceed18604b 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle @@ -47,15 +47,6 @@ configure(allprojects) { } } } - - project.plugins.withType(RestTestBasePlugin) { - tasks.withType(StandaloneRestIntegTestTask).configureEach { - if (BuildParams.getIsRuntimeJavaHomeSet() == false) { - nonInputProperties.systemProperty("tests.runtime.java", "${-> launcher.map { it.metadata.installationPath.asFile.path }.get()}") - } - } - } - project.plugins.withType(ThirdPartyAuditPrecommitPlugin) { project.getTasks().withType(ThirdPartyAuditTask.class).configureEach { if (BuildParams.getIsRuntimeJavaHomeSet() == false) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java new file mode 100644 index 0000000000000..8b21826447b46 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal; + +import org.elasticsearch.gradle.util.GradleUtils; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.plugins.JavaLibraryPlugin; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.compile.CompileOptions; +import org.gradle.api.tasks.compile.JavaCompile; +import org.gradle.jvm.tasks.Jar; +import org.gradle.jvm.toolchain.JavaLanguageVersion; +import org.gradle.jvm.toolchain.JavaToolchainService; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.tree.ClassNode; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Stream; + +import javax.inject.Inject; + +import static org.objectweb.asm.Opcodes.V_PREVIEW; + +public class MrjarPlugin implements Plugin { + + private static final Pattern MRJAR_SOURCESET_PATTERN = Pattern.compile("main(\\d{2})"); + + private final JavaToolchainService javaToolchains; + + @Inject + MrjarPlugin(JavaToolchainService javaToolchains) { + this.javaToolchains = javaToolchains; + } + + @Override + public void apply(Project project) { + project.getPluginManager().apply(JavaLibraryPlugin.class); + var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); + + var srcDir = project.getProjectDir().toPath().resolve("src"); + try (var subdirStream = Files.list(srcDir)) { + for (Path sourceset : subdirStream.toList()) { + assert Files.isDirectory(sourceset); + String 
sourcesetName = sourceset.getFileName().toString(); + Matcher sourcesetMatcher = MRJAR_SOURCESET_PATTERN.matcher(sourcesetName); + if (sourcesetMatcher.matches()) { + int javaVersion = Integer.parseInt(sourcesetMatcher.group(1)); + addMrjarSourceset(project, javaExtension, sourcesetName, javaVersion); + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private void addMrjarSourceset(Project project, JavaPluginExtension javaExtension, String sourcesetName, int javaVersion) { + SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourcesetName); + GradleUtils.extendSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME, sourcesetName); + + project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME).configure(jarTask -> { + jarTask.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput())); + jarTask.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); + }); + + project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> { + compileTask.getJavaCompiler() + .set(javaToolchains.compilerFor(spec -> { spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)); })); + compileTask.setSourceCompatibility(Integer.toString(javaVersion)); + CompileOptions compileOptions = compileTask.getOptions(); + compileOptions.getRelease().set(javaVersion); + compileOptions.getCompilerArgs().add("--enable-preview"); + compileOptions.getCompilerArgs().add("-Xlint:-preview"); + + compileTask.doLast(t -> { stripPreviewFromFiles(compileTask.getDestinationDirectory().getAsFile().get().toPath()); }); + }); + } + + private static void stripPreviewFromFiles(Path compileDir) { + try (Stream fileStream = Files.walk(compileDir)) { + fileStream.filter(p -> p.toString().endsWith(".class")).forEach(MrjarPlugin::maybeStripPreview); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private static void 
maybeStripPreview(Path file) { + ClassWriter classWriter = null; + try (var in = Files.newInputStream(file)) { + ClassReader classReader = new ClassReader(in); + ClassNode classNode = new ClassNode(); + classReader.accept(classNode, 0); + + if ((classNode.version & V_PREVIEW) != 0) { + classNode.version = classNode.version & ~V_PREVIEW; + classWriter = new ClassWriter(0); + classNode.accept(classWriter); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + if (classWriter != null) { + try (var out = Files.newOutputStream(file)) { + out.write(classWriter.toByteArray()); + } catch (IOException e) { + throw new org.gradle.api.UncheckedIOException(e); + } + } + + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java index 20f46990815bd..b84a90259787e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java @@ -20,8 +20,10 @@ import org.gradle.api.provider.ListProperty; import org.gradle.api.provider.MapProperty; import org.gradle.api.provider.Property; +import org.gradle.api.provider.SetProperty; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputDirectory; +import org.gradle.api.tasks.Optional; import org.gradle.api.tasks.OutputFile; import org.gradle.api.tasks.PathSensitive; import org.gradle.api.tasks.PathSensitivity; @@ -36,6 +38,7 @@ import java.nio.file.Files; import java.util.Arrays; import java.util.List; +import java.util.stream.Collectors; import javax.inject.Inject; @@ -43,7 +46,7 @@ * This task wraps up the details of building a Docker image, including adding a pull * mechanism that can retry, and emitting the image SHA as a task output. 
*/ -public class DockerBuildTask extends DefaultTask { +public abstract class DockerBuildTask extends DefaultTask { private static final Logger LOGGER = Logging.getLogger(DockerBuildTask.class); private final WorkerExecutor workerExecutor; @@ -55,7 +58,6 @@ public class DockerBuildTask extends DefaultTask { private boolean noCache = true; private String[] baseImages; private MapProperty buildArgs; - private Property platform; @Inject public DockerBuildTask(WorkerExecutor workerExecutor, ObjectFactory objectFactory, ProjectLayout projectLayout) { @@ -63,7 +65,6 @@ public DockerBuildTask(WorkerExecutor workerExecutor, ObjectFactory objectFactor this.markerFile = objectFactory.fileProperty(); this.dockerContext = objectFactory.directoryProperty(); this.buildArgs = objectFactory.mapProperty(String.class, String.class); - this.platform = objectFactory.property(String.class).convention(Architecture.current().dockerPlatform); this.markerFile.set(projectLayout.getBuildDirectory().file("markers/" + this.getName() + ".marker")); } @@ -75,9 +76,10 @@ public void build() { params.getTags().set(Arrays.asList(tags)); params.getPull().set(pull); params.getNoCache().set(noCache); + params.getPush().set(getPush().getOrElse(false)); params.getBaseImages().set(Arrays.asList(baseImages)); params.getBuildArgs().set(buildArgs); - params.getPlatform().set(platform); + params.getPlatforms().set(getPlatforms()); }); } @@ -129,10 +131,16 @@ public MapProperty getBuildArgs() { } @Input - public Property getPlatform() { - return platform; + public abstract SetProperty getPlatforms(); + + public void setPlatform(String platform) { + getPlatforms().set(Arrays.asList(platform)); } + @Input + @Optional + public abstract Property getPush(); + @OutputFile public RegularFileProperty getMarkerFile() { return markerFile; @@ -181,7 +189,7 @@ public void execute() { } final List tags = parameters.getTags().get(); - final boolean isCrossPlatform = 
parameters.getPlatform().get().equals(Architecture.current().dockerPlatform) == false; + final boolean isCrossPlatform = isCrossPlatform(); LoggedExec.exec(execOperations, spec -> { spec.executable("docker"); @@ -193,7 +201,7 @@ public void execute() { spec.args("build", parameters.getDockerContext().get().getAsFile().getAbsolutePath()); if (isCrossPlatform) { - spec.args("--platform", parameters.getPlatform().get()); + spec.args("--platform", parameters.getPlatforms().get().stream().collect(Collectors.joining(","))); } if (parameters.getNoCache().get()) { @@ -203,11 +211,20 @@ public void execute() { tags.forEach(tag -> spec.args("--tag", tag)); parameters.getBuildArgs().get().forEach((k, v) -> spec.args("--build-arg", k + "=" + v)); + + if (parameters.getPush().getOrElse(false)) { + spec.args("--push"); + } }); // Fetch the Docker image's hash, and write it to desk as the task's output. Doing this allows us // to do proper up-to-date checks in Gradle. try { + // multi-platform image builds do not end up in local registry, so we need to pull the just build image + // first to get the checksum and also serves as a test for the image being pushed correctly + if (parameters.getPlatforms().get().size() > 1 && parameters.getPush().getOrElse(false)) { + pullBaseImage(tags.get(0)); + } final String checksum = getImageChecksum(tags.get(0)); Files.writeString(parameters.getMarkerFile().getAsFile().get().toPath(), checksum + "\n"); } catch (IOException e) { @@ -215,6 +232,13 @@ public void execute() { } } + private boolean isCrossPlatform() { + return getParameters().getPlatforms() + .get() + .stream() + .anyMatch(any -> any.equals(Architecture.current().dockerPlatform) == false); + } + private String getImageChecksum(String imageTag) { final ByteArrayOutputStream stdout = new ByteArrayOutputStream(); @@ -243,6 +267,8 @@ interface Parameters extends WorkParameters { MapProperty getBuildArgs(); - Property getPlatform(); + SetProperty getPlatforms(); + + Property getPush(); } 
} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java index 092230a2b12ea..f71973c2fb15c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java @@ -28,6 +28,7 @@ import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -187,7 +188,7 @@ public void checkDependencies() { } File licensesDirAsFile = licensesDir.get().getAsFile(); if (dependencies.isEmpty()) { - if (licensesDirAsFile.exists()) { + if (licensesDirAsFile.exists() && allIgnored() == false) { throw new GradleException("Licenses dir " + licensesDirAsFile + " exists, but there are no dependencies"); } return; // no dependencies to check @@ -227,6 +228,10 @@ public void checkDependencies() { sources.forEach((item, exists) -> failIfAnyMissing(item, exists, "sources")); } + private boolean allIgnored() { + return Arrays.asList(getLicensesDir().listFiles()).stream().map(f -> f.getName()).allMatch(ignoreFiles::contains); + } + // This is just a marker output folder to allow this task being up-to-date. // The check logic is exception driven so a failed tasks will not be defined // by this output but when successful we can safely mark the task as up-to-date. 
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/AbstractVersionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/AbstractVersionsTask.java new file mode 100644 index 0000000000000..0ab3a9b917d65 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/AbstractVersionsTask.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.gradle.api.DefaultTask; +import org.gradle.initialization.layout.BuildLayout; + +import java.nio.file.Path; + +public abstract class AbstractVersionsTask extends DefaultTask { + + static final String TRANSPORT_VERSION_TYPE = "TransportVersion"; + static final String INDEX_VERSION_TYPE = "IndexVersion"; + + static final String SERVER_MODULE_PATH = "server/src/main/java/"; + static final String TRANSPORT_VERSION_FILE_PATH = SERVER_MODULE_PATH + "org/elasticsearch/TransportVersions.java"; + static final String INDEX_VERSION_FILE_PATH = SERVER_MODULE_PATH + "org/elasticsearch/index/IndexVersions.java"; + + static final String SERVER_RESOURCES_PATH = "server/src/main/resources/"; + static final String TRANSPORT_VERSIONS_RECORD = SERVER_RESOURCES_PATH + "org/elasticsearch/TransportVersions.csv"; + static final String INDEX_VERSIONS_RECORD = SERVER_RESOURCES_PATH + "org/elasticsearch/index/IndexVersions.csv"; + + final Path rootDir; + + protected AbstractVersionsTask(BuildLayout layout) { + rootDir = layout.getRootDirectory().toPath(); + } + +} diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ExtractCurrentVersionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ExtractCurrentVersionsTask.java new file mode 100644 index 0000000000000..3530d7ef9e807 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ExtractCurrentVersionsTask.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import com.github.javaparser.StaticJavaParser; +import com.github.javaparser.ast.CompilationUnit; +import com.github.javaparser.ast.body.FieldDeclaration; +import com.github.javaparser.ast.expr.IntegerLiteralExpr; + +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.options.Option; +import org.gradle.initialization.layout.BuildLayout; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +import javax.inject.Inject; + +public class ExtractCurrentVersionsTask extends AbstractVersionsTask { + private static final Logger LOGGER = Logging.getLogger(ExtractCurrentVersionsTask.class); + + private Path outputFile; + + @Inject + public ExtractCurrentVersionsTask(BuildLayout layout) { + super(layout); + } + + @Option(option = "output-file", description = "File to output tag information to") + public void outputFile(String file) { + this.outputFile = Path.of(file); + } + + @TaskAction + 
public void executeTask() throws IOException { + if (outputFile == null) { + throw new IllegalArgumentException("Output file not specified"); + } + + LOGGER.lifecycle("Extracting latest version information"); + + List output = new ArrayList<>(); + int transportVersion = readLatestVersion(rootDir.resolve(TRANSPORT_VERSION_FILE_PATH)); + LOGGER.lifecycle("Transport version: {}", transportVersion); + output.add(TRANSPORT_VERSION_TYPE + ":" + transportVersion); + + int indexVersion = readLatestVersion(rootDir.resolve(INDEX_VERSION_FILE_PATH)); + LOGGER.lifecycle("Index version: {}", indexVersion); + output.add(INDEX_VERSION_TYPE + ":" + indexVersion); + + LOGGER.lifecycle("Writing version information to {}", outputFile); + Files.write(outputFile, output, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); + } + + static class FieldIdExtractor implements Consumer { + private Integer highestVersionId; + + Integer highestVersionId() { + return highestVersionId; + } + + @Override + public void accept(FieldDeclaration fieldDeclaration) { + var ints = fieldDeclaration.findAll(IntegerLiteralExpr.class); + switch (ints.size()) { + case 0 -> { + // No ints in the field declaration, ignore + } + case 1 -> { + int id = ints.get(0).asNumber().intValue(); + if (highestVersionId != null && highestVersionId > id) { + LOGGER.warn("Version ids [{}, {}] out of order", highestVersionId, id); + } else { + highestVersionId = id; + } + } + default -> LOGGER.warn("Multiple integers found in version field declaration [{}]", fieldDeclaration); // and ignore it + } + } + } + + private static int readLatestVersion(Path javaVersionsFile) throws IOException { + CompilationUnit java = StaticJavaParser.parse(javaVersionsFile); + + FieldIdExtractor extractor = new FieldIdExtractor(); + java.walk(FieldDeclaration.class, extractor); // walks in code file order + if (extractor.highestVersionId == null) { + throw new IllegalArgumentException("No version ids found 
in " + javaVersionsFile); + } + return extractor.highestVersionId; + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java index 6c978edd48c29..8001b82797557 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java @@ -50,6 +50,9 @@ public void apply(Project project) { project.getTasks() .register("updateVersions", UpdateVersionsTask.class, t -> project.getTasks().named("spotlessApply").get().mustRunAfter(t)); + project.getTasks().register("extractCurrentVersions", ExtractCurrentVersionsTask.class); + project.getTasks().register("tagVersions", TagVersionsTask.class); + final FileTree yamlFiles = projectDirectory.dir("docs/changelog") .getAsFileTree() .matching(new PatternSet().include("**/*.yml", "**/*.yaml")); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/TagVersionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/TagVersionsTask.java new file mode 100644 index 0000000000000..fa11746543e82 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/TagVersionsTask.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.release; + +import org.elasticsearch.gradle.Version; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.options.Option; +import org.gradle.initialization.layout.BuildLayout; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import javax.inject.Inject; + +public class TagVersionsTask extends AbstractVersionsTask { + private static final Logger LOGGER = Logging.getLogger(TagVersionsTask.class); + + private Version releaseVersion; + + private Map tagVersions = Map.of(); + + @Inject + public TagVersionsTask(BuildLayout layout) { + super(layout); + } + + @Option(option = "release", description = "The release version to be tagged") + public void release(String version) { + releaseVersion = Version.fromString(version); + } + + @Option(option = "tag-version", description = "Version id to tag. 
Of the form :.") + public void tagVersions(List version) { + this.tagVersions = version.stream().map(l -> { + var split = l.split(":"); + if (split.length != 2) throw new IllegalArgumentException("Invalid tag format [" + l + "]"); + return split; + }).collect(Collectors.toMap(l -> l[0], l -> Integer.parseInt(l[1]))); + } + + @TaskAction + public void executeTask() throws IOException { + if (releaseVersion == null) { + throw new IllegalArgumentException("Release version not specified"); + } + if (tagVersions.isEmpty()) { + throw new IllegalArgumentException("No version tags specified"); + } + + LOGGER.lifecycle("Tagging version {} component ids", releaseVersion); + + var versions = expandV7Version(tagVersions); + + for (var v : versions.entrySet()) { + Path recordFile = switch (v.getKey()) { + case TRANSPORT_VERSION_TYPE -> rootDir.resolve(TRANSPORT_VERSIONS_RECORD); + case INDEX_VERSION_TYPE -> rootDir.resolve(INDEX_VERSIONS_RECORD); + default -> throw new IllegalArgumentException("Unknown version type " + v.getKey()); + }; + + LOGGER.lifecycle("Adding version record for {} to [{}]: [{},{}]", v.getKey(), recordFile, releaseVersion, v.getValue()); + + Path file = rootDir.resolve(recordFile); + List versionRecords = Files.readAllLines(file); + var modified = addVersionRecord(versionRecords, releaseVersion, v.getValue()); + if (modified.isPresent()) { + Files.write( + file, + modified.get(), + StandardOpenOption.CREATE, + StandardOpenOption.WRITE, + StandardOpenOption.TRUNCATE_EXISTING + ); + } + } + } + + /* + * V7 just extracts a single Version. If so, this version needs to be applied to transport and index versions. 
+ */ + private static Map expandV7Version(Map tagVersions) { + Integer v7Version = tagVersions.get("Version"); + if (v7Version == null) return tagVersions; + + return Map.of(TRANSPORT_VERSION_TYPE, v7Version, INDEX_VERSION_TYPE, v7Version); + } + + private static final Pattern VERSION_LINE = Pattern.compile("(\\d+\\.\\d+\\.\\d+),(\\d+)"); + + static Optional> addVersionRecord(List versionRecordLines, Version release, int id) { + Map versions = versionRecordLines.stream().map(l -> { + Matcher m = VERSION_LINE.matcher(l); + if (m.matches() == false) throw new IllegalArgumentException(String.format("Incorrect format for line [%s]", l)); + return m; + }).collect(Collectors.toMap(m -> Version.fromString(m.group(1)), m -> Integer.parseInt(m.group(2)))); + + Integer existing = versions.putIfAbsent(release, id); + if (existing != null) { + if (existing.equals(id)) { + LOGGER.lifecycle("Version id [{}] for release [{}] already recorded", id, release); + return Optional.empty(); + } else { + throw new IllegalArgumentException( + String.format( + "Release [%s] already recorded with version id [%s], cannot update to version [%s]", + release, + existing, + id + ) + ); + } + } + + return Optional.of( + versions.entrySet().stream().sorted(Map.Entry.comparingByKey()).map(e -> e.getKey() + "," + e.getValue()).toList() + ); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java index f8073f384b871..9996ffe613545 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java @@ -8,6 +8,7 @@ package org.elasticsearch.gradle.internal.release; +import com.github.javaparser.GeneratedJavaParserConstants; import com.github.javaparser.StaticJavaParser; import 
com.github.javaparser.ast.CompilationUnit; import com.github.javaparser.ast.NodeList; @@ -15,6 +16,9 @@ import com.github.javaparser.ast.body.FieldDeclaration; import com.github.javaparser.ast.body.VariableDeclarator; import com.github.javaparser.ast.expr.NameExpr; +import com.github.javaparser.ast.observer.ObservableProperty; +import com.github.javaparser.printer.ConcreteSyntaxModel; +import com.github.javaparser.printer.concretesyntaxmodel.CsmElement; import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter; import com.google.common.annotations.VisibleForTesting; @@ -27,6 +31,7 @@ import org.gradle.initialization.layout.BuildLayout; import java.io.IOException; +import java.lang.reflect.Field; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; @@ -42,7 +47,84 @@ import javax.annotation.Nullable; import javax.inject.Inject; +import static com.github.javaparser.ast.observer.ObservableProperty.TYPE_PARAMETERS; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmConditional.Condition.FLAG; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.block; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.child; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.comma; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.comment; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.conditional; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.list; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.newline; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.none; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.sequence; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.space; +import static 
com.github.javaparser.printer.concretesyntaxmodel.CsmElement.string; +import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.token; + public class UpdateVersionsTask extends DefaultTask { + + static { + replaceDefaultJavaParserClassCsm(); + } + + /* + * The default JavaParser CSM which it uses to format any new declarations added to a class + * inserts two newlines after each declaration. Our version classes only have one newline. + * In order to get javaparser lexical printer to use our format, we have to completely replace + * the statically declared CSM pattern using hacky reflection + * to access the static map where these are stored, and insert a replacement that is identical + * apart from only one newline at the end of each member declaration, rather than two. + */ + private static void replaceDefaultJavaParserClassCsm() { + try { + Field classCsms = ConcreteSyntaxModel.class.getDeclaredField("concreteSyntaxModelByClass"); + classCsms.setAccessible(true); + @SuppressWarnings({ "unchecked", "rawtypes" }) + Map csms = (Map) classCsms.get(null); + + // copied from the static initializer in ConcreteSyntaxModel + csms.put( + ClassOrInterfaceDeclaration.class, + sequence( + comment(), + list(ObservableProperty.ANNOTATIONS, newline(), none(), newline()), + list(ObservableProperty.MODIFIERS, space(), none(), space()), + conditional( + ObservableProperty.INTERFACE, + FLAG, + token(GeneratedJavaParserConstants.INTERFACE), + token(GeneratedJavaParserConstants.CLASS) + ), + space(), + child(ObservableProperty.NAME), + list( + TYPE_PARAMETERS, + sequence(comma(), space()), + string(GeneratedJavaParserConstants.LT), + string(GeneratedJavaParserConstants.GT) + ), + list( + ObservableProperty.EXTENDED_TYPES, + sequence(string(GeneratedJavaParserConstants.COMMA), space()), + sequence(space(), token(GeneratedJavaParserConstants.EXTENDS), space()), + none() + ), + list( + ObservableProperty.IMPLEMENTED_TYPES, + 
sequence(string(GeneratedJavaParserConstants.COMMA), space()), + sequence(space(), token(GeneratedJavaParserConstants.IMPLEMENTS), space()), + none() + ), + space(), + block(sequence(newline(), list(ObservableProperty.MEMBERS, sequence(newline()/*, newline()*/), newline(), newline()))) + ) + ); + } catch (ReflectiveOperationException e) { + throw new AssertionError(e); + } + } + private static final Logger LOGGER = Logging.getLogger(UpdateVersionsTask.class); static final String SERVER_MODULE_PATH = "server/src/main/java/"; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index b51842bbdcbf7..6d43ad109c323 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -63,7 +63,6 @@ public class RestTestBasePlugin implements Plugin { private static final String TESTS_MAX_PARALLEL_FORKS_SYSPROP = "tests.max.parallel.forks"; - private static final String TESTS_RUNTIME_JAVA_SYSPROP = "tests.runtime.java"; private static final String DEFAULT_DISTRIBUTION_SYSPROP = "tests.default.distribution"; private static final String INTEG_TEST_DISTRIBUTION_SYSPROP = "tests.integ-test.distribution"; private static final String BWC_SNAPSHOT_DISTRIBUTION_SYSPROP_PREFIX = "tests.snapshot.distribution."; @@ -189,7 +188,6 @@ public void apply(Project project) { // Wire up integ-test distribution by default for all test tasks FileCollection extracted = integTestDistro.getExtracted(); nonInputSystemProperties.systemProperty(INTEG_TEST_DISTRIBUTION_SYSPROP, () -> extracted.getSingleFile().getPath()); - nonInputSystemProperties.systemProperty(TESTS_RUNTIME_JAVA_SYSPROP, BuildParams.getRuntimeJavaHome()); // Add `usesDefaultDistribution()` extension method to 
test tasks to indicate they require the default distro task.getExtensions().getExtraProperties().set("usesDefaultDistribution", new Closure(task) { diff --git a/build-tools-internal/src/main/resources/fips_java.policy b/build-tools-internal/src/main/resources/fips_java.policy index bbfc1caf7593a..c259b0bc908d8 100644 --- a/build-tools-internal/src/main/resources/fips_java.policy +++ b/build-tools-internal/src/main/resources/fips_java.policy @@ -18,3 +18,8 @@ grant { permission org.bouncycastle.crypto.CryptoServicesPermission "exportPrivateKey"; permission java.io.FilePermission "${javax.net.ssl.trustStore}", "read"; }; + +// rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { + permission java.net.SocketPermission "*", "connect"; +}; diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java index 1a9284276043c..b909970638753 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java @@ -63,8 +63,17 @@ public void execute(DependencyLicensesTask dependencyLicensesTask) { public void givenProjectWithLicensesDirButNoDependenciesThenShouldThrowException() throws Exception { expectedException.expect(GradleException.class); expectedException.expectMessage(containsString("exists, but there are no dependencies")); + getLicensesDir(project).mkdir(); + createFileIn(getLicensesDir(project), "groovy-LICENSE.txt", PERMISSIVE_LICENSE_TEXT); + task.get().checkDependencies(); + } + @Test + public void givenProjectWithLicensesDirButAllIgnoreFileAndNoDependencies() throws Exception { 
getLicensesDir(project).mkdir(); + String licenseFileName = "cloudcarbonfootprint-LICENSE.txt"; + createFileIn(getLicensesDir(project), licenseFileName, PERMISSIVE_LICENSE_TEXT); + task.get().ignoreFile(licenseFileName); task.get().checkDependencies(); } diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ExtractCurrentVersionsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ExtractCurrentVersionsTaskTests.java new file mode 100644 index 0000000000000..1dd4675756f94 --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ExtractCurrentVersionsTaskTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.release; + +import com.github.javaparser.StaticJavaParser; +import com.github.javaparser.ast.body.FieldDeclaration; + +import org.elasticsearch.gradle.internal.release.ExtractCurrentVersionsTask.FieldIdExtractor; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class ExtractCurrentVersionsTaskTests { + + @Test + public void testFieldExtractor() { + var unit = StaticJavaParser.parse(""" + public class Version { + public static final Version V_1 = def(1); + public static final Version V_2 = def(2); + public static final Version V_3 = def(3); + + // ignore fields with no or more than one int + public static final Version REF = V_3; + public static final Version COMPUTED = compute(100, 200); + }"""); + + FieldIdExtractor extractor = new FieldIdExtractor(); + unit.walk(FieldDeclaration.class, extractor); + assertThat(extractor.highestVersionId(), is(3)); + } +} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/TagVersionsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/TagVersionsTaskTests.java new file mode 100644 index 0000000000000..28f92faf49b06 --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/TagVersionsTaskTests.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.release; + +import org.elasticsearch.gradle.Version; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThrows; + +public class TagVersionsTaskTests { + + @Test + public void testAddLastRecord() { + List startingLines = List.of( + "8.0.0,6100", + "8.0.1,6100", + "8.1.0,6204", + "8.2.0,6234", + "8.3.0,6239", + "8.3.1,6260", + "8.4.0,6301" + ); + + var modified = TagVersionsTask.addVersionRecord(new ArrayList<>(startingLines), Version.fromString("8.4.1"), 6305); + assertThat(modified.isPresent(), is(true)); + + List expected = new ArrayList<>(startingLines); + expected.add("8.4.1,6305"); + expected.sort(Comparator.naturalOrder()); + assertThat(modified.get(), contains(expected.toArray())); + } + + @Test + public void testAddMiddleRecord() { + List startingLines = List.of( + "8.0.0,6100", + "8.0.1,6100", + "8.1.0,6204", + "8.2.0,6234", + "8.3.0,6239", + "8.3.1,6260", + "8.4.0,6301" + ); + + var modified = TagVersionsTask.addVersionRecord(new ArrayList<>(startingLines), Version.fromString("8.3.2"), 6280); + assertThat(modified.isPresent(), is(true)); + + List expected = new ArrayList<>(startingLines); + expected.add("8.3.2,6280"); + expected.sort(Comparator.naturalOrder()); + assertThat(modified.get(), contains(expected.toArray())); + } + + @Test + public void testIdempotent() { + List startingLines = List.of( + "8.0.0,6100", + "8.0.1,6100", + "8.1.0,6204", + "8.2.0,6234", + "8.3.1,6260", + "8.3.0,6239", + "8.4.0,6301" + ); + + var modified = TagVersionsTask.addVersionRecord(new ArrayList<>(startingLines), Version.fromString("8.4.0"), 6301); + assertThat(modified.isPresent(), is(false)); + } + + @Test + public void testFailsConflictingId() { + List startingLines = List.of( + 
"8.0.0,6100", + "8.0.1,6100", + "8.1.0,6204", + "8.2.0,6234", + "8.3.1,6260", + "8.3.0,6239", + "8.4.0,6301" + ); + + var ex = assertThrows( + IllegalArgumentException.class, + () -> TagVersionsTask.addVersionRecord(new ArrayList<>(startingLines), Version.fromString("8.4.0"), 6302) + ); + assertThat(ex.getMessage(), is("Release [8.4.0] already recorded with version id [6301], cannot update to version [6302]")); + } + + @Test + public void testFailsIncorrectFormat() { + List lines = List.of("8.0.,f4d2"); + + var ex = assertThrows( + IllegalArgumentException.class, + () -> TagVersionsTask.addVersionRecord(new ArrayList<>(lines), Version.fromString("1.0.0"), 1) + ); + assertThat(ex.getMessage(), is("Incorrect format for line [8.0.,f4d2]")); + } + + @Test + public void testFailsDuplicateVersions() { + List lines = List.of("8.0.0,100", "8.0.0,101"); + + var ex = assertThrows( + IllegalStateException.class, + () -> TagVersionsTask.addVersionRecord(new ArrayList<>(lines), Version.fromString("8.0.1"), 102) + ); + assertThat(ex.getMessage(), is("Duplicate key 8.0.0 (attempted merging values 100 and 101)")); + } +} diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a76f507079f2f..6d09bd5d9fcbf 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,8 +1,8 @@ elasticsearch = 8.13.0 -lucene = 9.9.1 +lucene = 9.9.2 bundled_jdk_vendor = openjdk -bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d +bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac # optional dependencies spatial4j = 0.7 jts = 1.15.0 diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index ca2cbc09f7c2f..746a09d242761 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java 
@@ -201,7 +201,7 @@ public void beforeStart() { try { mockServer.start(); node.setting("telemetry.metrics.enabled", "true"); - node.setting("tracing.apm.agent.enabled", "true"); + node.setting("tracing.apm.enabled", "true"); node.setting("tracing.apm.agent.transaction_sample_rate", "0.10"); node.setting("tracing.apm.agent.metrics_interval", "10s"); node.setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockServer.getPort()); @@ -213,8 +213,8 @@ public void beforeStart() { // if metrics were not enabled explicitly for gradlew run we should disable them else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { // metrics node.setting("telemetry.metrics.enabled", "false"); - } else if (node.getSettingKeys().contains("tracing.apm.agent.enabled") == false) { // tracing - node.setting("tracing.apm.agent.enable", "false"); + } else if (node.getSettingKeys().contains("tracing.apm.enabled") == false) { // tracing + node.setting("tracing.apm.enable", "false"); } } diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java index 571efd88aafec..f587163b9324f 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -33,8 +34,8 @@ public class NoopPlugin extends Plugin implements ActionPlugin { - 
public static final ActionType NOOP_SEARCH_ACTION = new ActionType<>("mock:data/read/search", SearchResponse::new); - public static final ActionType NOOP_BULK_ACTION = new ActionType<>("mock:data/write/bulk", BulkResponse::new); + public static final ActionType NOOP_SEARCH_ACTION = new ActionType<>("mock:data/read/search"); + public static final ActionType NOOP_BULK_ACTION = new ActionType<>("mock:data/write/bulk"); @Override public List> getActions() { @@ -47,6 +48,7 @@ public class NoopPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java index 6c04cda6a66a9..6ad1bac8d6e32 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java @@ -54,6 +54,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC String defaultRouting = request.param("routing"); String defaultPipeline = request.param("pipeline"); Boolean defaultRequireAlias = request.paramAsBoolean("require_alias", null); + Boolean defaultRequireDataStream = request.paramAsBoolean("require_data_stream", null); Boolean defaultListExecutedPipelines = request.paramAsBoolean("list_executed_pipelines", null); String waitForActiveShards = request.param("wait_for_active_shards"); @@ -69,6 +70,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC null, defaultPipeline, defaultRequireAlias, + 
defaultRequireDataStream, defaultListExecutedPipelines, true, request.getXContentType(), diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedStringStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedStringStats.java deleted file mode 100644 index ab059682460f2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedStringStats.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.analytics; - -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Results from the {@code string_stats} aggregation. 
- */ -public class ParsedStringStats extends ParsedAggregation { - private static final ParseField COUNT_FIELD = new ParseField("count"); - private static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length"); - private static final ParseField MAX_LENGTH_FIELD = new ParseField("max_length"); - private static final ParseField AVG_LENGTH_FIELD = new ParseField("avg_length"); - private static final ParseField ENTROPY_FIELD = new ParseField("entropy"); - private static final ParseField DISTRIBUTION_FIELD = new ParseField("distribution"); - - private final long count; - private final int minLength; - private final int maxLength; - private final double avgLength; - private final double entropy; - private final boolean showDistribution; - private final Map distribution; - - private ParsedStringStats( - String name, - long count, - int minLength, - int maxLength, - double avgLength, - double entropy, - boolean showDistribution, - Map distribution - ) { - setName(name); - this.count = count; - this.minLength = minLength; - this.maxLength = maxLength; - this.avgLength = avgLength; - this.entropy = entropy; - this.showDistribution = showDistribution; - this.distribution = distribution; - } - - /** - * The number of non-empty fields counted. - */ - public long getCount() { - return count; - } - - /** - * The length of the shortest term. - */ - public int getMinLength() { - return minLength; - } - - /** - * The length of the longest term. - */ - public int getMaxLength() { - return maxLength; - } - - /** - * The average length computed over all terms. - */ - public double getAvgLength() { - return avgLength; - } - - /** - * The Shannon Entropy - * value computed over all terms collected by the aggregation. - * Shannon entropy quantifies the amount of information contained in - * the field. It is a very useful metric for measuring a wide range of - * properties of a data set, such as diversity, similarity, - * randomness etc. 
- */ - public double getEntropy() { - return entropy; - } - - /** - * The probability distribution for all characters. {@code null} unless - * explicitly requested with {@link StringStatsAggregationBuilder#showDistribution(boolean)}. - */ - public Map getDistribution() { - return distribution; - } - - @Override - public String getType() { - return StringStatsAggregationBuilder.NAME; - } - - private static final Object NULL_DISTRIBUTION_MARKER = new Object(); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - StringStatsAggregationBuilder.NAME, - true, - (args, name) -> { - long count = (long) args[0]; - boolean disributionWasExplicitNull = args[5] == NULL_DISTRIBUTION_MARKER; - if (count == 0) { - return new ParsedStringStats(name, count, 0, 0, 0, 0, disributionWasExplicitNull, null); - } - int minLength = (int) args[1]; - int maxLength = (int) args[2]; - double averageLength = (double) args[3]; - double entropy = (double) args[4]; - if (disributionWasExplicitNull) { - return new ParsedStringStats(name, count, minLength, maxLength, averageLength, entropy, disributionWasExplicitNull, null); - } else { - @SuppressWarnings("unchecked") - Map distribution = (Map) args[5]; - return new ParsedStringStats(name, count, minLength, maxLength, averageLength, entropy, distribution != null, distribution); - } - } - ); - static { - PARSER.declareLong(constructorArg(), COUNT_FIELD); - PARSER.declareIntOrNull(constructorArg(), 0, MIN_LENGTH_FIELD); - PARSER.declareIntOrNull(constructorArg(), 0, MAX_LENGTH_FIELD); - PARSER.declareDoubleOrNull(constructorArg(), 0, AVG_LENGTH_FIELD); - PARSER.declareDoubleOrNull(constructorArg(), 0, ENTROPY_FIELD); - PARSER.declareObjectOrNull( - optionalConstructorArg(), - (p, c) -> unmodifiableMap(p.map(HashMap::new, XContentParser::doubleValue)), - NULL_DISTRIBUTION_MARKER, - DISTRIBUTION_FIELD - ); - ParsedAggregation.declareAggregationFields(PARSER); - } - - @Override - protected XContentBuilder 
doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(COUNT_FIELD.getPreferredName(), count); - if (count == 0) { - builder.nullField(MIN_LENGTH_FIELD.getPreferredName()); - builder.nullField(MAX_LENGTH_FIELD.getPreferredName()); - builder.nullField(AVG_LENGTH_FIELD.getPreferredName()); - builder.field(ENTROPY_FIELD.getPreferredName(), 0.0); - } else { - builder.field(MIN_LENGTH_FIELD.getPreferredName(), minLength); - builder.field(MAX_LENGTH_FIELD.getPreferredName(), maxLength); - builder.field(AVG_LENGTH_FIELD.getPreferredName(), avgLength); - builder.field(ENTROPY_FIELD.getPreferredName(), entropy); - } - if (showDistribution) { - builder.field(DISTRIBUTION_FIELD.getPreferredName(), distribution); - } - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java deleted file mode 100644 index 994c3411ce081..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.analytics; - -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Results of the {@code top_metrics} aggregation. - */ -public class ParsedTopMetrics extends ParsedAggregation { - private static final ParseField TOP_FIELD = new ParseField("top"); - - private final List topMetrics; - - private ParsedTopMetrics(String name, List topMetrics) { - setName(name); - this.topMetrics = topMetrics; - } - - /** - * The list of top metrics, in sorted order. - */ - public List getTopMetrics() { - return topMetrics; - } - - @Override - public String getType() { - return TopMetricsAggregationBuilder.NAME; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.startArray(TOP_FIELD.getPreferredName()); - for (TopMetrics top : topMetrics) { - top.toXContent(builder, params); - } - return builder.endArray(); - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TopMetricsAggregationBuilder.NAME, - true, - (args, name) -> { - @SuppressWarnings("unchecked") - List topMetrics = (List) args[0]; - return new ParsedTopMetrics(name, topMetrics); - } - ); - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> TopMetrics.PARSER.parse(p, null), TOP_FIELD); - ParsedAggregation.declareAggregationFields(PARSER); - } - - /** - * The metrics belonging to the document with the "top" sort key. 
- */ - public static class TopMetrics implements ToXContent { - private static final ParseField SORT_FIELD = new ParseField("sort"); - private static final ParseField METRICS_FIELD = new ParseField("metrics"); - - private final List sort; - private final Map metrics; - - private TopMetrics(List sort, Map metrics) { - this.sort = sort; - this.metrics = metrics; - } - - /** - * The sort key for these top metrics. - */ - public List getSort() { - return sort; - } - - /** - * The top metric values returned by the aggregation. - */ - public Map getMetrics() { - return metrics; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "top", - true, - (args, name) -> { - @SuppressWarnings("unchecked") - List sort = (List) args[0]; - @SuppressWarnings("unchecked") - Map metrics = (Map) args[1]; - return new TopMetrics(sort, metrics); - } - ); - static { - PARSER.declareFieldArray( - constructorArg(), - (p, c) -> XContentParserUtils.parseFieldsValue(p), - SORT_FIELD, - ObjectParser.ValueType.VALUE_ARRAY - ); - PARSER.declareObject(constructorArg(), (p, c) -> p.map(), METRICS_FIELD); - } - - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(SORT_FIELD.getPreferredName(), sort); - builder.field(METRICS_FIELD.getPreferredName(), metrics); - builder.endObject(); - return builder; - }; - } -} diff --git a/client/rest/src/main/java/org/elasticsearch/client/Request.java b/client/rest/src/main/java/org/elasticsearch/client/Request.java index 6423bee1cb44e..8195e3fdc8a79 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest/src/main/java/org/elasticsearch/client/Request.java @@ -67,7 +67,7 @@ public String getEndpoint() { /** * Add a query string parameter. * @param name the name of the url parameter. Must not be null. - * @param value the value of the url url parameter. 
If {@code null} then + * @param value the value of the url parameter. If {@code null} then * the parameter is sent as {@code name} rather than {@code name=value} * @throws IllegalArgumentException if a parameter with that name has * already been set diff --git a/client/test/build.gradle b/client/test/build.gradle index 9ee222b036cd1..d9a10a9c6ffdc 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -27,9 +27,9 @@ dependencies { api "org.hamcrest:hamcrest:${versions.hamcrest}" // mockito - api 'org.mockito:mockito-core:5.4.0' - api 'org.mockito:mockito-subclass:5.4.0' - api 'net.bytebuddy:byte-buddy:1.14.5' + api 'org.mockito:mockito-core:5.9.0' + api 'org.mockito:mockito-subclass:5.9.0' + api 'net.bytebuddy:byte-buddy:1.14.11' api 'org.objenesis:objenesis:3.3' } diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 96e577d5635ab..a3bb202780c7a 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -396,9 +396,9 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { dockerContext.fileProvider(transformTask.map { Sync task -> task.getDestinationDir() }) - noCache = BuildParams.isCi + noCache = BuildParams.isCi() tags = generateTags(base, architecture) - platform = architecture.dockerPlatform + platforms.add(architecture.dockerPlatform) // We don't build the Iron Bank image when we release Elasticsearch, as there's // separate process for submitting new releases. 
However, for testing we do a @@ -465,10 +465,10 @@ void addBuildEssDockerImageTask(Architecture architecture) { dockerContext.fileProvider(buildContextTask.map { it.getDestinationDir() }) - noCache = BuildParams.isCi + noCache = BuildParams.isCi() baseImages = [] tags = generateTags(base, architecture) - platform = architecture.dockerPlatform + platforms.add(architecture.dockerPlatform) onlyIf("$architecture supported") { isArchitectureSupported(architecture) } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index 9dcd630f52631..67f0b571092ff 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -79,7 +79,8 @@ class APMJvmOptions { "application_packages", "org.elasticsearch,org.apache.lucene", "metrics_interval", "120s", "breakdown_metrics", "false", - "central_config", "false" + "central_config", "false", + "transaction_sample_rate", "0.2" ); // end::noformat @@ -181,14 +182,24 @@ static String agentCommandLineOption(Path agentJar, Path tmpPropertiesFile) { return "-javaagent:" + agentJar + "=c=" + tmpPropertiesFile; } - private static void extractSecureSettings(SecureSettings secrets, Map propertiesMap) { + // package private for testing + static void extractSecureSettings(SecureSettings secrets, Map propertiesMap) { final Set settingNames = secrets.getSettingNames(); for (String key : List.of("api_key", "secret_token")) { - if (settingNames.contains("tracing.apm." + key)) { - try (SecureString token = secrets.getString("tracing.apm." 
+ key)) { - propertiesMap.put(key, token.toString()); + for (String prefix : List.of("telemetry.", "tracing.apm.")) { + if (settingNames.contains(prefix + key)) { + if (propertiesMap.containsKey(key)) { + throw new IllegalStateException( + Strings.format("Duplicate telemetry setting: [telemetry.%s] and [tracing.apm.%s]", key, key) + ); + } + + try (SecureString token = secrets.getString(prefix + key)) { + propertiesMap.put(key, token.toString()); + } } } + } } @@ -215,26 +226,44 @@ private static Map extractDynamicSettings(Map pr static Map extractApmSettings(Settings settings) throws UserException { final Map propertiesMap = new HashMap<>(); - final Settings agentSettings = settings.getByPrefix("tracing.apm.agent."); - agentSettings.keySet().forEach(key -> propertiesMap.put(key, String.valueOf(agentSettings.get(key)))); + // tracing.apm.agent. is deprecated by telemetry.agent. + final String telemetryAgentPrefix = "telemetry.agent."; + final String deprecatedTelemetryAgentPrefix = "tracing.apm.agent."; - // special handling of global labels, the agent expects them in format: key1=value1,key2=value2 - final Settings globalLabelsSettings = settings.getByPrefix("tracing.apm.agent.global_labels."); - final StringJoiner globalLabels = new StringJoiner(","); + final Settings telemetryAgentSettings = settings.getByPrefix(telemetryAgentPrefix); + telemetryAgentSettings.keySet().forEach(key -> propertiesMap.put(key, String.valueOf(telemetryAgentSettings.get(key)))); - for (var globalLabel : globalLabelsSettings.keySet()) { - // remove the individual label from the properties map, they are harmless, but we shouldn't be passing - // something to the agent it doesn't understand. - propertiesMap.remove("global_labels." 
+ globalLabel); - var globalLabelValue = globalLabelsSettings.get(globalLabel); - if (Strings.isNullOrBlank(globalLabelValue) == false) { - // sanitize for the agent labels separators in case the global labels passed in have , or = - globalLabelValue = globalLabelValue.replaceAll("[,=]", "_"); - // append to the global labels string - globalLabels.add(String.join("=", globalLabel, globalLabelValue)); + final Settings apmAgentSettings = settings.getByPrefix(deprecatedTelemetryAgentPrefix); + for (String key : apmAgentSettings.keySet()) { + if (propertiesMap.containsKey(key)) { + throw new IllegalStateException( + Strings.format( + "Duplicate telemetry setting: [%s%s] and [%s%s]", + telemetryAgentPrefix, + key, + deprecatedTelemetryAgentPrefix, + key + ) + ); } + propertiesMap.put(key, String.valueOf(apmAgentSettings.get(key))); } + StringJoiner globalLabels = extractGlobalLabels(telemetryAgentPrefix, propertiesMap, settings); + if (globalLabels.length() == 0) { + globalLabels = extractGlobalLabels(deprecatedTelemetryAgentPrefix, propertiesMap, settings); + } else { + StringJoiner tracingGlobalLabels = extractGlobalLabels(deprecatedTelemetryAgentPrefix, propertiesMap, settings); + if (tracingGlobalLabels.length() != 0) { + throw new IllegalArgumentException( + "Cannot have global labels with tracing.agent prefix [" + + globalLabels + + "] and telemetry.apm.agent prefix [" + + tracingGlobalLabels + + "]" + ); + } + } if (globalLabels.length() > 0) { propertiesMap.put("global_labels", globalLabels.toString()); } @@ -255,6 +284,26 @@ static Map extractApmSettings(Settings settings) throws UserExce return propertiesMap; } + private static StringJoiner extractGlobalLabels(String prefix, Map propertiesMap, Settings settings) { + // special handling of global labels, the agent expects them in format: key1=value1,key2=value2 + final Settings globalLabelsSettings = settings.getByPrefix(prefix + "global_labels."); + final StringJoiner globalLabels = new StringJoiner(","); + + 
for (var globalLabel : globalLabelsSettings.keySet()) { + // remove the individual label from the properties map, they are harmless, but we shouldn't be passing + // something to the agent it doesn't understand. + propertiesMap.remove("global_labels." + globalLabel); + var globalLabelValue = globalLabelsSettings.get(globalLabel); + if (Strings.isNullOrBlank(globalLabelValue) == false) { + // sanitize for the agent labels separators in case the global labels passed in have , or = + globalLabelValue = globalLabelValue.replaceAll("[,=]", "_"); + // append to the global labels string + globalLabels.add(String.join("=", globalLabel, globalLabelValue)); + } + } + return globalLabels; + } + // package private for testing static Path createTemporaryPropertiesFile(Path tmpdir) throws IOException { return Files.createTempFile(tmpdir, ".elstcapm.", ".tmp"); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java index ebba7fc357b9f..6e337b0b61845 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.server.cli; import org.elasticsearch.cli.UserException; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.Node; @@ -20,15 +21,21 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; +import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -73,47 +80,112 @@ public void testFileDeleteWorks() throws IOException { assertFalse(Files.exists(tempFile)); } - public void testExtractSettings() throws UserException { - Settings settings = Settings.builder() - .put("tracing.apm.enabled", true) - .put("tracing.apm.agent.server_url", "https://myurl:443") - .put("tracing.apm.agent.service_node_name", "instance-0000000001") - .put("tracing.apm.agent.global_labels.deployment_id", "123") - .put("tracing.apm.agent.global_labels.deployment_name", "APM Tracing") - .put("tracing.apm.agent.global_labels.organization_id", "456") - .build(); + public void testExtractSecureSettings() { + MockSecureSettings duplicateSecureSettings = new MockSecureSettings(); - var extracted = APMJvmOptions.extractApmSettings(settings); + for (String prefix : List.of("telemetry.", "tracing.apm.")) { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(prefix + "secret_token", "token"); + secureSettings.setString(prefix + "api_key", "key"); - assertThat( - extracted, - allOf( - hasEntry("server_url", "https://myurl:443"), - hasEntry("service_node_name", "instance-0000000001"), - hasEntry(equalTo("global_labels"), not(endsWith(","))), // test that we have collapsed all global labels into one - not(hasKey("global_labels.organization_id")) // tests that we strip out the top level label keys - ) + duplicateSecureSettings.setString(prefix + "api_key", "secret"); + + Map propertiesMap = new HashMap<>(); + APMJvmOptions.extractSecureSettings(secureSettings, propertiesMap); + + 
assertThat(propertiesMap, matchesMap(Map.of("secret_token", "token", "api_key", "key"))); + } + + Exception exception = expectThrows( + IllegalStateException.class, + () -> APMJvmOptions.extractSecureSettings(duplicateSecureSettings, new HashMap<>()) ); + assertThat(exception.getMessage(), containsString("Duplicate telemetry setting")); + assertThat(exception.getMessage(), containsString("telemetry.api_key")); + assertThat(exception.getMessage(), containsString("tracing.apm.api_key")); - List labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); + } - assertThat(labels, hasSize(3)); - assertThat(labels, containsInAnyOrder("deployment_name=APM Tracing", "organization_id=456", "deployment_id=123")); + public void testExtractSettings() throws UserException { + Function buildSettings = (prefix) -> Settings.builder() + .put("tracing.apm.enabled", true) + .put(prefix + "server_url", "https://myurl:443") + .put(prefix + "service_node_name", "instance-0000000001"); + + for (String prefix : List.of("tracing.apm.agent.", "telemetry.agent.")) { + var name = "APM Tracing"; + var deploy = "123"; + var org = "456"; + var extracted = APMJvmOptions.extractApmSettings( + buildSettings.apply(prefix) + .put(prefix + "global_labels.deployment_name", name) + .put(prefix + "global_labels.deployment_id", deploy) + .put(prefix + "global_labels.organization_id", org) + .build() + ); + + assertThat( + extracted, + allOf( + hasEntry("server_url", "https://myurl:443"), + hasEntry("service_node_name", "instance-0000000001"), + hasEntry(equalTo("global_labels"), not(endsWith(","))), // test that we have collapsed all global labels into one + not(hasKey("global_labels.organization_id")) // tests that we strip out the top level label keys + ) + ); + + List labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); + assertThat(labels, hasSize(3)); + assertThat(labels, containsInAnyOrder("deployment_name=APM Tracing", "organization_id=" + org, 
"deployment_id=" + deploy)); + + // test replacing with underscores and skipping empty + name = "APM=Tracing"; + deploy = ""; + org = ",456"; + extracted = APMJvmOptions.extractApmSettings( + buildSettings.apply(prefix) + .put(prefix + "global_labels.deployment_name", name) + .put(prefix + "global_labels.deployment_id", deploy) + .put(prefix + "global_labels.organization_id", org) + .build() + ); + labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); + assertThat(labels, hasSize(2)); + assertThat(labels, containsInAnyOrder("deployment_name=APM_Tracing", "organization_id=_456")); + } + + IllegalStateException err = expectThrows( + IllegalStateException.class, + () -> APMJvmOptions.extractApmSettings( + Settings.builder() + .put("tracing.apm.enabled", true) + .put("tracing.apm.agent.server_url", "https://myurl:443") + .put("telemetry.agent.server_url", "https://myurl-2:443") + .build() + ) + ); + assertThat(err.getMessage(), is("Duplicate telemetry setting: [telemetry.agent.server_url] and [tracing.apm.agent.server_url]")); + } - settings = Settings.builder() + public void testNoMixedLabels() { + String telemetryAgent = "telemetry.agent."; + String tracingAgent = "tracing.apm.agent."; + Settings settings = Settings.builder() .put("tracing.apm.enabled", true) - .put("tracing.apm.agent.server_url", "https://myurl:443") - .put("tracing.apm.agent.service_node_name", "instance-0000000001") - .put("tracing.apm.agent.global_labels.deployment_id", "") - .put("tracing.apm.agent.global_labels.deployment_name", "APM=Tracing") - .put("tracing.apm.agent.global_labels.organization_id", ",456") + .put(telemetryAgent + "server_url", "https://myurl:443") + .put(telemetryAgent + "service_node_name", "instance-0000000001") + .put(tracingAgent + "global_labels.deployment_id", "123") + .put(telemetryAgent + "global_labels.organization_id", "456") .build(); - extracted = APMJvmOptions.extractApmSettings(settings); - - labels = 
Arrays.stream(extracted.get("global_labels").split(",")).toList(); - assertThat(labels, hasSize(2)); - assertThat(labels, containsInAnyOrder("deployment_name=APM_Tracing", "organization_id=_456")); + IllegalArgumentException err = assertThrows(IllegalArgumentException.class, () -> APMJvmOptions.extractApmSettings(settings)); + assertThat( + err.getMessage(), + is( + "Cannot have global labels with tracing.agent prefix [organization_id=456] and" + + " telemetry.apm.agent prefix [deployment_id=123]" + ) + ); } private Path makeFakeAgentJar() throws IOException { diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 420ee36359745..13c1154a5a8be 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.9.1 -:lucene_version_path: 9_9_1 +:lucene_version: 9.9.2 +:lucene_version_path: 9_9_2 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/build.gradle b/docs/build.gradle index b6f696f0aae6a..e38b0129b219e 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -1787,6 +1787,28 @@ setups['setup-snapshots'] = setups['setup-repository'] + ''' "username": "jacknich", "password" : "l0ng-r4nd0m-p@ssw0rd" } +''' + setups['sandrakn_user'] = ''' + - do: + security.put_user: + username: "sandrakn" + body: > + { + "password" : "l0ng-r4nd0m-p@ssw0rd", + "roles" : [ "admin", "other_role1" ], + "full_name" : "Sandra Knight", + "email" : "sandrakn@example.com", + "metadata" : { "intelligence" : 7 } + } + - do: + security.activate_user_profile: + body: > + { + "grant_type": "password", + "username": "sandrakn", + "password" : "l0ng-r4nd0m-p@ssw0rd" + } + ''' setups['app0102_privileges'] = ''' - do: diff --git a/docs/changelog/100031.yaml b/docs/changelog/100031.yaml deleted file mode 100644 index 32aa51d2f9de6..0000000000000 --- a/docs/changelog/100031.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100031 -summary: Add executed pipelines to bulk api 
response -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/100033.yaml b/docs/changelog/100033.yaml deleted file mode 100644 index 92ef6cd289fdc..0000000000000 --- a/docs/changelog/100033.yaml +++ /dev/null @@ -1,9 +0,0 @@ -pr: 100033 -summary: "[Behavioral Analytics] Analytics collections use Data Stream Lifecycle (DSL)\ - \ instead of Index Lifecycle Management (ILM) for data retention management. Behavioral\ - \ analytics has traditionally used ILM to manage data retention. Starting with 8.12.0,\ - \ this will change. Analytics collections created prior to 8.12.0 will continue to use\ - \ their existing ILM policies, but new analytics collections will be managed using DSL." -area: Application -type: feature -issues: [ ] diff --git a/docs/changelog/100236.yaml b/docs/changelog/100236.yaml deleted file mode 100644 index b33825f9bc553..0000000000000 --- a/docs/changelog/100236.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100236 -summary: Record operation purpose for s3 stats collection -area: Distributed -type: enhancement -issues: [] diff --git a/docs/changelog/100287.yaml b/docs/changelog/100287.yaml deleted file mode 100644 index b92855a3342e2..0000000000000 --- a/docs/changelog/100287.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100287 -summary: Add an assertion to the testTransformFeatureReset test case -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/100316.yaml b/docs/changelog/100316.yaml deleted file mode 100644 index 9efb64a332dc1..0000000000000 --- a/docs/changelog/100316.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100316 -summary: Parallelize stale index deletion -area: Snapshot/Restore -type: enhancement -issues: - - 61513 diff --git a/docs/changelog/100333.yaml b/docs/changelog/100333.yaml deleted file mode 100644 index 96a2a62deffe5..0000000000000 --- a/docs/changelog/100333.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100333 -summary: Enable Universal Profiling as Enterprise feature -area: Application -type: enhancement 
-issues: [] diff --git a/docs/changelog/100368.yaml b/docs/changelog/100368.yaml deleted file mode 100644 index 2b9d8dc0b2044..0000000000000 --- a/docs/changelog/100368.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100368 -summary: "Status codes for Aggregation errors, part 2" -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/100383.yaml b/docs/changelog/100383.yaml deleted file mode 100644 index 6cda66149b2cc..0000000000000 --- a/docs/changelog/100383.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100383 -summary: Push s3 requests count via metrics API -area: Distributed -type: enhancement -issues: [] diff --git a/docs/changelog/100392.yaml b/docs/changelog/100392.yaml deleted file mode 100644 index ab693d5ae04ce..0000000000000 --- a/docs/changelog/100392.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100392 -summary: Prevent resource over-subscription in model allocation planner -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/100408.yaml b/docs/changelog/100408.yaml deleted file mode 100644 index 275c3b4a0de48..0000000000000 --- a/docs/changelog/100408.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100408 -summary: "ESQL: Make blocks ref counted" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/100466.yaml b/docs/changelog/100466.yaml deleted file mode 100644 index aaa30876ddfdf..0000000000000 --- a/docs/changelog/100466.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100466 -summary: "Introduce includeShardsStats in the stats request to indicate that we only fetch a summary" -area: Stats -type: enhancement -issues: [99744] diff --git a/docs/changelog/100492.yaml b/docs/changelog/100492.yaml deleted file mode 100644 index e0a1020b49488..0000000000000 --- a/docs/changelog/100492.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100492 -summary: Add runtime field of type `geo_shape` -area: Geo -type: enhancement -issues: - - 61299 diff --git a/docs/changelog/100519.yaml b/docs/changelog/100519.yaml deleted file mode 100644 index 
086c6962b3a95..0000000000000 --- a/docs/changelog/100519.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100519 -summary: Disallow vectors whose magnitudes will not fit in a float -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/100565.yaml b/docs/changelog/100565.yaml deleted file mode 100644 index 066e9bbb4b227..0000000000000 --- a/docs/changelog/100565.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100565 -summary: "[Monitoring] Dont get cluster state until recovery" -area: Monitoring -type: bug -issues: [] diff --git a/docs/changelog/100570.yaml b/docs/changelog/100570.yaml deleted file mode 100644 index b68a905b0e046..0000000000000 --- a/docs/changelog/100570.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100570 -summary: Added metric for cache eviction of entries with non zero frequency -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/100609.yaml b/docs/changelog/100609.yaml deleted file mode 100644 index c1c63c1af5d4d..0000000000000 --- a/docs/changelog/100609.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100609 -summary: Fix metric gauge creation model -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/100642.yaml b/docs/changelog/100642.yaml deleted file mode 100644 index 805a20174e11d..0000000000000 --- a/docs/changelog/100642.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100642 -summary: "ESQL: Alias duplicated aggregations in a stats" -area: ES|QL -type: enhancement -issues: - - 100544 diff --git a/docs/changelog/100646.yaml b/docs/changelog/100646.yaml deleted file mode 100644 index 63958ff18c4df..0000000000000 --- a/docs/changelog/100646.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100646 -summary: Support complex datemath expressions in index and index alias names -area: Search -type: bug -issues: [] diff --git a/docs/changelog/100776.yaml b/docs/changelog/100776.yaml deleted file mode 100644 index a0bde13f47c92..0000000000000 --- a/docs/changelog/100776.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100776 -summary: Health Report 
API should not return RED for unassigned cold/frozen shards - when data is available -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/100826.yaml b/docs/changelog/100826.yaml deleted file mode 100644 index 1b1729d1491ea..0000000000000 --- a/docs/changelog/100826.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 100826 -summary: Fix geo tile bounding boxes to be consistent with arithmetic method -area: Geo -type: bug -issues: - - 92611 - - 95574 diff --git a/docs/changelog/100828.yaml b/docs/changelog/100828.yaml deleted file mode 100644 index 6271a1cf2a0a9..0000000000000 --- a/docs/changelog/100828.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100828 -summary: Consider task cancelled exceptions as recoverable -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/100862.yaml b/docs/changelog/100862.yaml deleted file mode 100644 index ce9f119203d9d..0000000000000 --- a/docs/changelog/100862.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100862 -summary: Sending an index name to `DocumentParsingObserver` that is not ever null -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/100899.yaml b/docs/changelog/100899.yaml deleted file mode 100644 index 988546bb22cbe..0000000000000 --- a/docs/changelog/100899.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100899 -summary: Add methods for adding generation listeners with primary term -area: Store -type: enhancement -issues: [] diff --git a/docs/changelog/100921.yaml b/docs/changelog/100921.yaml deleted file mode 100644 index e6e2caa93d465..0000000000000 --- a/docs/changelog/100921.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100921 -summary: "Add support for Serbian Language Analyzer" -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/100938.yaml b/docs/changelog/100938.yaml deleted file mode 100644 index b21f6955c992e..0000000000000 --- a/docs/changelog/100938.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100938 -summary: "Set includeShardsStats = false in NodesStatsRequest where the 
caller does not use shards-level statistics" -area: Stats -type: enhancement -issues: [] diff --git a/docs/changelog/100974.yaml b/docs/changelog/100974.yaml deleted file mode 100644 index e5d3a4ad3c9df..0000000000000 --- a/docs/changelog/100974.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100974 -summary: Create new cluster state API for querying features present on a cluster -area: "Infra/Core" -type: feature -issues: [] diff --git a/docs/changelog/100990.yaml b/docs/changelog/100990.yaml deleted file mode 100644 index 21b6fb93655cc..0000000000000 --- a/docs/changelog/100990.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 100990 -summary: Add status code to `rest.suppressed` log output -area: "Infra/Logging" -type: enhancement -issues: [] diff --git a/docs/changelog/101024.yaml b/docs/changelog/101024.yaml deleted file mode 100644 index edbd3d834526c..0000000000000 --- a/docs/changelog/101024.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101024 -summary: More consistent logging messages for snapshot deletion -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/101026.yaml b/docs/changelog/101026.yaml deleted file mode 100644 index cee85a722d7fa..0000000000000 --- a/docs/changelog/101026.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101026 -summary: Remove `auto_configure` privilege for profiling -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/101032.yaml b/docs/changelog/101032.yaml deleted file mode 100644 index 1c69e372704ce..0000000000000 --- a/docs/changelog/101032.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101032 -summary: Throw when wrapping rate agg in `DeferableBucketAggregator` -area: TSDB -type: bug -issues: [] diff --git a/docs/changelog/101050.yaml b/docs/changelog/101050.yaml deleted file mode 100644 index 1a68466e6e728..0000000000000 --- a/docs/changelog/101050.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101050 -summary: Ensure the correct `threadContext` for `RemoteClusterNodesAction` -area: Network -type: bug -issues: [] diff 
--git a/docs/changelog/101055.yaml b/docs/changelog/101055.yaml deleted file mode 100644 index e4ca4548c2ef6..0000000000000 --- a/docs/changelog/101055.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101055 -summary: Make tasks that calculate checkpoints time out -area: Transform -type: enhancement -issues: [] diff --git a/docs/changelog/101057.yaml b/docs/changelog/101057.yaml deleted file mode 100644 index 2024c714f58b0..0000000000000 --- a/docs/changelog/101057.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101057 -summary: Add error logging for *QL -area: EQL -type: enhancement -issues: [] diff --git a/docs/changelog/101066.yaml b/docs/changelog/101066.yaml deleted file mode 100644 index 2fac601d65674..0000000000000 --- a/docs/changelog/101066.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101066 -summary: Log errors in `RestResponse` regardless of `error_trace` parameter -area: "Infra/Core" -type: enhancement -issues: - - 100884 diff --git a/docs/changelog/101093.yaml b/docs/changelog/101093.yaml deleted file mode 100644 index 99765170dd257..0000000000000 --- a/docs/changelog/101093.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101093 -summary: Make IPAddress writeable -area: Infra/Scripting -type: bug -issues: - - 101082 diff --git a/docs/changelog/101126.yaml b/docs/changelog/101126.yaml deleted file mode 100644 index 7a0f45891b171..0000000000000 --- a/docs/changelog/101126.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101126 -summary: Include totals in flamegraph response -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/101147.yaml b/docs/changelog/101147.yaml deleted file mode 100644 index cb556af35eead..0000000000000 --- a/docs/changelog/101147.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101147 -summary: Persist data counts on job close before results index refresh -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/101148.yaml b/docs/changelog/101148.yaml deleted file mode 100644 index eabe288e69e88..0000000000000 --- 
a/docs/changelog/101148.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101148 -summary: Add support for marking component templates as deprecated -area: Indices APIs -type: enhancement -issues: - - 100992 diff --git a/docs/changelog/101185.yaml b/docs/changelog/101185.yaml deleted file mode 100644 index 63d3a4da328b1..0000000000000 --- a/docs/changelog/101185.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101185 -summary: Repo analysis of uncontended register behaviour -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/101202.yaml b/docs/changelog/101202.yaml deleted file mode 100644 index 565338a2dbb6e..0000000000000 --- a/docs/changelog/101202.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101202 -summary: Optimize `MurmurHash3` -area: "Ingest Node" -type: enhancement -issues: [] diff --git a/docs/changelog/101230.yaml b/docs/changelog/101230.yaml deleted file mode 100644 index 3ed7eacb3fce0..0000000000000 --- a/docs/changelog/101230.yaml +++ /dev/null @@ -1,12 +0,0 @@ -pr: 101230 -summary: Enable query phase parallelism within a single shard -area: Search -type: enhancement -issues: - - 80693 -highlight: - title: Enable query phase parallelism within a single shard - body: |- - Activate inter-segment search concurrency by default in the query phase, in order to - enable parallelizing search execution across segments that a single shard is made of. 
- notable: true diff --git a/docs/changelog/101235.yaml b/docs/changelog/101235.yaml deleted file mode 100644 index 53adf9527c2c4..0000000000000 --- a/docs/changelog/101235.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101235 -summary: Load different way -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/101311.yaml b/docs/changelog/101311.yaml deleted file mode 100644 index e4786b937e060..0000000000000 --- a/docs/changelog/101311.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101311 -summary: Cache resolved index for mgets -area: CRUD -type: enhancement -issues: [] diff --git a/docs/changelog/101333.yaml b/docs/changelog/101333.yaml deleted file mode 100644 index 4452687b995d3..0000000000000 --- a/docs/changelog/101333.yaml +++ /dev/null @@ -1,29 +0,0 @@ -pr: 101333 -summary: Fixed JWT principal from claims -area: Authorization -type: breaking -issues: [] -breaking: - title: Fixed JWT principal from claims - area: Authorization - details: "This changes the format of a JWT's principal before the JWT is actually\ - \ validated by any JWT realm. The JWT's principal is a convenient way to refer\ - \ to a JWT that has not yet been verified by a JWT realm. The JWT's principal\ - \ is printed in the audit and regular logs (notably for auditing authn failures)\ - \ as well as the smart realm chain reordering optimization. The JWT principal\ - \ is NOT required to be identical to the JWT-authenticated user's principal, but\ - \ in general, they should be similar. Previously, the JWT's principal was built\ - \ by individual realms in the same way the realms built the authenticated user's\ - \ principal. This had the advantage that, in simpler JWT realms configurations\ - \ (e.g. a single JWT realm in the chain), the JWT principal and the authenticated\ - \ user's principal are very similar. However the drawback is that, in general,\ - \ the JWT principal and the user principal can be very different (i.e. 
in the\ - \ case where one JWT realm builds the JWT principal and a different one builds\ - \ the user principal). Another downside is that the (unauthenticated) JWT principal\ - \ depended on realm ordering, which makes identifying the JWT from its principal\ - \ dependent on the ES authn realm configuration. This PR implements a consistent\ - \ fixed logic to build the JWT principal, which only depends on the JWT's claims\ - \ and no ES configuration." - impact: "Users will observe changed format and values for the `user.name` attribute\ - \ of `authentication_failed` audit log events, in the JWT (failed) authn case." - notable: false diff --git a/docs/changelog/101346.yaml b/docs/changelog/101346.yaml deleted file mode 100644 index b32b123c506d1..0000000000000 --- a/docs/changelog/101346.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101346 -summary: Report full stack trace for non-state file settings transforms -area: Infra/Settings -type: bug -issues: [] diff --git a/docs/changelog/101383.yaml b/docs/changelog/101383.yaml deleted file mode 100644 index 4875403acfaeb..0000000000000 --- a/docs/changelog/101383.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101383 -summary: "ESQL: Track memory from values loaded from lucene" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/101385.yaml b/docs/changelog/101385.yaml deleted file mode 100644 index 406ed804cbbcc..0000000000000 --- a/docs/changelog/101385.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101385 -summary: "ESQL: Fix planning of MV_EXPAND with foldable expressions" -area: ES|QL -type: bug -issues: - - 101118 diff --git a/docs/changelog/101390.yaml b/docs/changelog/101390.yaml deleted file mode 100644 index 23bdef6e39dfe..0000000000000 --- a/docs/changelog/101390.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101390 -summary: Enable inter-segment concurrency for terms aggs -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/101392.yaml b/docs/changelog/101392.yaml deleted file mode 
100644 index af79917245726..0000000000000 --- a/docs/changelog/101392.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101392 -summary: Include ML processor limits in `_ml/info` response -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/101396.yaml b/docs/changelog/101396.yaml deleted file mode 100644 index a486b2bed9237..0000000000000 --- a/docs/changelog/101396.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101396 -summary: "ESQL: Track blocks emitted from lucene" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/101409.yaml b/docs/changelog/101409.yaml deleted file mode 100644 index 82e7f339fdd89..0000000000000 --- a/docs/changelog/101409.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101409 -summary: Adding a simulate ingest api -area: Ingest Node -type: feature -issues: [] diff --git a/docs/changelog/101423.yaml b/docs/changelog/101423.yaml deleted file mode 100644 index a5497d444797f..0000000000000 --- a/docs/changelog/101423.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101423 -summary: Export circuit breaker trip count as a counter metric -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/101426.yaml b/docs/changelog/101426.yaml deleted file mode 100644 index f9053ba1c1ec1..0000000000000 --- a/docs/changelog/101426.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101426 -summary: Add undesired shard count -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/101457.yaml b/docs/changelog/101457.yaml deleted file mode 100644 index 03bdbe39b5b8e..0000000000000 --- a/docs/changelog/101457.yaml +++ /dev/null @@ -1,14 +0,0 @@ -pr: 101457 -summary: "Remove Plugin.createComponents method in favour of overload with a PluginServices object" -area: Infra/Plugins -type: breaking-java -breaking: - area: "Java API" - title: "Plugin.createComponents method has been refactored to take a single PluginServices object" - details: > - Plugin.createComponents currently takes several different service arguments. 
The signature of this method changes - every time a new service is added. The method has now been modified to take a single interface object - that new services are added to. This will reduce API incompatibility issues when a new service - is introduced in the future. - impact: "Plugins that override createComponents will need to be refactored to override the new method on ES 8.12+" - notable: false diff --git a/docs/changelog/101474.yaml b/docs/changelog/101474.yaml deleted file mode 100644 index 2c013fe5d2537..0000000000000 --- a/docs/changelog/101474.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101474 -summary: "[Search Applications] Return 400 response when template rendering produces invalid JSON" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/101488.yaml b/docs/changelog/101488.yaml deleted file mode 100644 index 1db48a63f8542..0000000000000 --- a/docs/changelog/101488.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101488 -summary: "ESQL: More tracking in `BlockHash` impls" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/101518.yaml b/docs/changelog/101518.yaml deleted file mode 100644 index 53db542640348..0000000000000 --- a/docs/changelog/101518.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101518 -summary: Check that scripts produce correct json in render template action -area: Search -type: bug -issues: - - 101477 diff --git a/docs/changelog/101535.yaml b/docs/changelog/101535.yaml deleted file mode 100644 index 79ed78fa1d7a1..0000000000000 --- a/docs/changelog/101535.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101535 -summary: Disable inter-segment concurrency when sorting by field -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/101577.yaml b/docs/changelog/101577.yaml deleted file mode 100644 index e485fd3811cb6..0000000000000 --- a/docs/changelog/101577.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101577 -summary: Add metrics to the shared blob cache -area: Search -type: enhancement -issues: [] diff --git 
a/docs/changelog/101585.yaml b/docs/changelog/101585.yaml deleted file mode 100644 index 71815df1f48d9..0000000000000 --- a/docs/changelog/101585.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101585 -summary: Reroute on shard snapshot completion -area: Snapshot/Restore -type: bug -issues: - - 101514 diff --git a/docs/changelog/101607.yaml b/docs/changelog/101607.yaml deleted file mode 100644 index 18ee7f1bdc5cc..0000000000000 --- a/docs/changelog/101607.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101607 -summary: Log stacktrace together with log message in order to help debugging -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/101609.yaml b/docs/changelog/101609.yaml deleted file mode 100644 index 27993574743d2..0000000000000 --- a/docs/changelog/101609.yaml +++ /dev/null @@ -1,9 +0,0 @@ -pr: 101609 -summary: > - Add a node feature join barrier. This prevents nodes from joining clusters that do not have - all the features already present in the cluster. This ensures that once a features is supported - by all the nodes in a cluster, that feature will never then not be supported in the future. 
- This is the corresponding functionality for the version join barrier, but for features -area: "Cluster Coordination" -type: feature -issues: [] diff --git a/docs/changelog/101660.yaml b/docs/changelog/101660.yaml deleted file mode 100644 index cb3d3118d15a6..0000000000000 --- a/docs/changelog/101660.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101660 -summary: Fall through malformed JWTs to subsequent realms in the chain -area: Authentication -type: bug -issues: - - 101367 diff --git a/docs/changelog/101682.yaml b/docs/changelog/101682.yaml deleted file mode 100644 index e512006057581..0000000000000 --- a/docs/changelog/101682.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101682 -summary: "Add manage_enrich cluster privilege to kibana_system role" -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/101700.yaml b/docs/changelog/101700.yaml deleted file mode 100644 index 08671360688a7..0000000000000 --- a/docs/changelog/101700.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101700 -summary: Fix `lastUnsafeSegmentGenerationForGets` for realtime get -area: Engine -type: bug -issues: [] diff --git a/docs/changelog/101723.yaml b/docs/changelog/101723.yaml deleted file mode 100644 index 146d164805f00..0000000000000 --- a/docs/changelog/101723.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101723 -summary: Allowing non-dynamic index settings to be updated by automatically unassigning - shards -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/101727.yaml b/docs/changelog/101727.yaml deleted file mode 100644 index 24a7e1d5b4e48..0000000000000 --- a/docs/changelog/101727.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101727 -summary: Fix listeners in `SharedBlobCacheService.readMultiRegions` -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/101753.yaml b/docs/changelog/101753.yaml deleted file mode 100644 index 7b64075998430..0000000000000 --- a/docs/changelog/101753.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101753 -summary: Expose 
roles by default in cat allocation API -area: CAT APIs -type: enhancement -issues: [] diff --git a/docs/changelog/101788.yaml b/docs/changelog/101788.yaml deleted file mode 100644 index b7cc1e20663e8..0000000000000 --- a/docs/changelog/101788.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101788 -summary: "ESQL: Narrow catch in convert functions" -area: ES|QL -type: bug -issues: - - 100820 diff --git a/docs/changelog/101802.yaml b/docs/changelog/101802.yaml deleted file mode 100644 index 20e857c32f664..0000000000000 --- a/docs/changelog/101802.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101802 -summary: Correctly logging watcher history write failures -area: Watcher -type: bug -issues: [] diff --git a/docs/changelog/101815.yaml b/docs/changelog/101815.yaml deleted file mode 100644 index 511e23beb68ef..0000000000000 --- a/docs/changelog/101815.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101815 -summary: Run `TransportGetAliasesAction` on local node -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/101826.yaml b/docs/changelog/101826.yaml deleted file mode 100644 index 87f3f8df1b0c2..0000000000000 --- a/docs/changelog/101826.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101826 -summary: Support keyed histograms -area: Aggregations -type: enhancement -issues: - - 100242 diff --git a/docs/changelog/101845.yaml b/docs/changelog/101845.yaml deleted file mode 100644 index 0dd95bdabca57..0000000000000 --- a/docs/changelog/101845.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101845 -summary: Introduce new endpoint to expose data stream lifecycle stats -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/101846.yaml b/docs/changelog/101846.yaml deleted file mode 100644 index 52dfff8801c62..0000000000000 --- a/docs/changelog/101846.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101846 -summary: Set `ActiveProcessorCount` when `node.processors` is set -area: Infra/CLI -type: enhancement -issues: [] diff --git a/docs/changelog/101847.yaml 
b/docs/changelog/101847.yaml deleted file mode 100644 index 91922b9e23ed0..0000000000000 --- a/docs/changelog/101847.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101847 -summary: Add an additional tiebreaker to RRF -area: Ranking -type: bug -issues: - - 101232 diff --git a/docs/changelog/101859.yaml b/docs/changelog/101859.yaml deleted file mode 100644 index 54f3fb12810ca..0000000000000 --- a/docs/changelog/101859.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101859 -summary: Cover head/tail commands edge cases and data types coverage -area: EQL -type: bug -issues: - - 101724 diff --git a/docs/changelog/101868.yaml b/docs/changelog/101868.yaml deleted file mode 100644 index d7cf650d25ed2..0000000000000 --- a/docs/changelog/101868.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101868 -summary: Read scores from downloaded vocabulary for XLM Roberta tokenizers -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/101872.yaml b/docs/changelog/101872.yaml new file mode 100644 index 0000000000000..1c63c2d8b009a --- /dev/null +++ b/docs/changelog/101872.yaml @@ -0,0 +1,6 @@ +pr: 101872 +summary: "Add `require_data_stream` parameter to indexing requests to enforce indexing operations target a data stream" +area: Data streams +type: feature +issues: + - 97032 diff --git a/docs/changelog/101904.yaml b/docs/changelog/101904.yaml deleted file mode 100644 index cad422cc52e15..0000000000000 --- a/docs/changelog/101904.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101904 -summary: Allow granting API keys with JWT as the access_token -area: Security -type: feature -issues: [] diff --git a/docs/changelog/101979.yaml b/docs/changelog/101979.yaml deleted file mode 100644 index ad119df24d36f..0000000000000 --- a/docs/changelog/101979.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101979 -summary: Calculate CO2 and emmission and costs -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/101989.yaml b/docs/changelog/101989.yaml deleted file mode 100644 index 
d294d194bd4e8..0000000000000 --- a/docs/changelog/101989.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101989 -summary: Add message field to `HealthPeriodicLogger` and `S3RequestRetryStats` -area: Health -type: enhancement -issues: [] diff --git a/docs/changelog/102020.yaml b/docs/changelog/102020.yaml deleted file mode 100644 index 7c74e9676d342..0000000000000 --- a/docs/changelog/102020.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102020 -summary: Retrieve stacktrace events from a custom index -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/102032.yaml b/docs/changelog/102032.yaml deleted file mode 100644 index 40463b9f252b9..0000000000000 --- a/docs/changelog/102032.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102032 -summary: Add vector_operation_count in profile output for knn searches -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/102048.yaml b/docs/changelog/102048.yaml deleted file mode 100644 index 54bc1d9eae52e..0000000000000 --- a/docs/changelog/102048.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102048 -summary: "Repo analysis: verify empty register" -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/102051.yaml b/docs/changelog/102051.yaml deleted file mode 100644 index c3ca4a546928f..0000000000000 --- a/docs/changelog/102051.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102051 -summary: "Repo analysis: allow configuration of register ops" -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/102056.yaml b/docs/changelog/102056.yaml deleted file mode 100644 index 455f66ba90b03..0000000000000 --- a/docs/changelog/102056.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102056 -summary: Use `BulkRequest` to store Application Privileges -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/102065.yaml b/docs/changelog/102065.yaml deleted file mode 100644 index 1a9a219df4502..0000000000000 --- a/docs/changelog/102065.yaml +++ /dev/null @@ -1,5 
+0,0 @@ -pr: 102065 -summary: Add more desired balance stats -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/102075.yaml b/docs/changelog/102075.yaml deleted file mode 100644 index 54daae04169db..0000000000000 --- a/docs/changelog/102075.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102075 -summary: Accept a single or multiple inputs to `_inference` -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102089.yaml b/docs/changelog/102089.yaml deleted file mode 100644 index 9f33c0648d09f..0000000000000 --- a/docs/changelog/102089.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102089 -summary: Add prefix strings option to trained models -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102093.yaml b/docs/changelog/102093.yaml deleted file mode 100644 index f6922c0d36be6..0000000000000 --- a/docs/changelog/102093.yaml +++ /dev/null @@ -1,14 +0,0 @@ -pr: 102093 -summary: Add byte quantization for float vectors in HNSW -area: Vector Search -type: feature -issues: [] -highlight: - title: Add new `int8_hsnw` index type for int8 quantization for HNSW - body: |- - This commit adds a new index type called `int8_hnsw`. This index will - automatically quantized float32 values into int8 byte values. While - this increases disk usage by 25%, it reduces memory required for - fast HNSW search by 75%. Dramatically reducing the resource overhead - required for dense vector search. 
- notable: true diff --git a/docs/changelog/102138.yaml b/docs/changelog/102138.yaml deleted file mode 100644 index 3819e3201150e..0000000000000 --- a/docs/changelog/102138.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102138 -summary: Skip shards that don't match the source query during checkpointing -area: Transform -type: enhancement -issues: [] diff --git a/docs/changelog/102140.yaml b/docs/changelog/102140.yaml deleted file mode 100644 index 0f086649b9710..0000000000000 --- a/docs/changelog/102140.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102140 -summary: Collect data tiers usage stats more efficiently -area: ILM+SLM -type: bug -issues: - - 100230 \ No newline at end of file diff --git a/docs/changelog/102165.yaml b/docs/changelog/102165.yaml deleted file mode 100644 index e1c4c76f1f6ff..0000000000000 --- a/docs/changelog/102165.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102165 -summary: Fix planning of duplicate aggs -area: ES|QL -type: bug -issues: - - 102083 diff --git a/docs/changelog/102172.yaml b/docs/changelog/102172.yaml deleted file mode 100644 index 485c2c4327e11..0000000000000 --- a/docs/changelog/102172.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102172 -summary: Adjust Histogram's bucket accounting to be iteratively -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/102177.yaml b/docs/changelog/102177.yaml deleted file mode 100644 index 62d7b11b86513..0000000000000 --- a/docs/changelog/102177.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102177 -summary: "GEO_POINT and CARTESIAN_POINT type support" -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/102183.yaml b/docs/changelog/102183.yaml deleted file mode 100644 index 3daa1418ba5d0..0000000000000 --- a/docs/changelog/102183.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 102183 -summary: "[ES|QL] pow function always returns double" -area: ES|QL -type: "breaking" -issues: - - 99055 -breaking: - title: "[ES|QL] pow function always returns double" - area: REST API - details: "In ES|QL, the 
pow function no longer returns the type of its inputs, instead\ - \ always returning a double." - impact: low. Most queries should continue to function with the change. - notable: false diff --git a/docs/changelog/102184.yaml b/docs/changelog/102184.yaml deleted file mode 100644 index ba4d045b6b0aa..0000000000000 --- a/docs/changelog/102184.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102184 -summary: Track ESQL enrich memory -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102188.yaml b/docs/changelog/102188.yaml deleted file mode 100644 index 595a8395fab5c..0000000000000 --- a/docs/changelog/102188.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102188 -summary: Track blocks in `AsyncOperator` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102190.yaml b/docs/changelog/102190.yaml deleted file mode 100644 index cd04e041fca5e..0000000000000 --- a/docs/changelog/102190.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102190 -summary: Track pages in ESQL enrich request/response -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102192.yaml b/docs/changelog/102192.yaml deleted file mode 100644 index 531aa943c9e36..0000000000000 --- a/docs/changelog/102192.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102192 -summary: "ESQL: Load more than one field at once" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102193.yaml b/docs/changelog/102193.yaml deleted file mode 100644 index 4d64493602ff2..0000000000000 --- a/docs/changelog/102193.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102193 -summary: Fix cache invalidation on privilege modification -area: Authorization -type: bug -issues: [] diff --git a/docs/changelog/102208.yaml b/docs/changelog/102208.yaml deleted file mode 100644 index b566a85753d82..0000000000000 --- a/docs/changelog/102208.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102208 -summary: Add static node settings to set default values for max merged segment sizes -area: Engine -type: enhancement -issues: 
[] diff --git a/docs/changelog/102244.yaml b/docs/changelog/102244.yaml deleted file mode 100644 index 3b160e033b57e..0000000000000 --- a/docs/changelog/102244.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102244 -summary: Expose reconciliation metrics via APM -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/102245.yaml b/docs/changelog/102245.yaml deleted file mode 100644 index 387540d96290c..0000000000000 --- a/docs/changelog/102245.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102245 -summary: Add non-green indicator names to `HealthPeriodicLogger` message -area: Health -type: enhancement -issues: [] diff --git a/docs/changelog/102248.yaml b/docs/changelog/102248.yaml deleted file mode 100644 index 854e8afde4086..0000000000000 --- a/docs/changelog/102248.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102248 -summary: Node stats as metrics -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102273.yaml b/docs/changelog/102273.yaml deleted file mode 100644 index 78ecc8b2d2734..0000000000000 --- a/docs/changelog/102273.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102273 -summary: Improve analyzer reload log message -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/102292.yaml b/docs/changelog/102292.yaml deleted file mode 100644 index 953c3ffdf6150..0000000000000 --- a/docs/changelog/102292.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102292 -summary: Consider duplicate stacktraces in custom index -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/102317.yaml b/docs/changelog/102317.yaml deleted file mode 100644 index 89b2ae5432101..0000000000000 --- a/docs/changelog/102317.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102317 -summary: "ESQL: Fix single value query" -area: ES|QL -type: bug -issues: - - 102298 diff --git a/docs/changelog/102350.yaml b/docs/changelog/102350.yaml deleted file mode 100644 index 00a311c5d99f8..0000000000000 --- a/docs/changelog/102350.yaml +++ /dev/null @@ -1,6 +0,0 @@ 
-pr: 102350 -summary: "ESQL: Fix rare bug with empty string" -area: ES|QL -type: bug -issues: - - 101969 diff --git a/docs/changelog/102371.yaml b/docs/changelog/102371.yaml new file mode 100644 index 0000000000000..5a698bc9d671a --- /dev/null +++ b/docs/changelog/102371.yaml @@ -0,0 +1,5 @@ +pr: 102371 +summary: Adding threadpool metrics +area: Infra/Core +type: enhancement +issues: [] diff --git a/docs/changelog/102379.yaml b/docs/changelog/102379.yaml deleted file mode 100644 index 0773b137779a5..0000000000000 --- a/docs/changelog/102379.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102379 -summary: Pass source query to `_field_caps` (as `index_filter`) when deducing destination index mappings for better - performance -area: Transform -type: enhancement -issues: [] diff --git a/docs/changelog/102388.yaml b/docs/changelog/102388.yaml deleted file mode 100644 index 3e65e46949bda..0000000000000 --- a/docs/changelog/102388.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102388 -summary: Add support for `index_filter` to open pit -area: Search -type: enhancement -issues: - - 99740 diff --git a/docs/changelog/102391.yaml b/docs/changelog/102391.yaml deleted file mode 100644 index 5fcbb9e6d2858..0000000000000 --- a/docs/changelog/102391.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102391 -summary: "ESQL: Support the `_source` metadata field" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102417.yaml b/docs/changelog/102417.yaml deleted file mode 100644 index 09c1a4f49dbfd..0000000000000 --- a/docs/changelog/102417.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102417 -summary: "ESQL: emit warnings from single-value functions processing multi-values" -area: ES|QL -type: feature -issues: - - 98743 diff --git a/docs/changelog/102426.yaml b/docs/changelog/102426.yaml deleted file mode 100644 index 3aad50ed1eee0..0000000000000 --- a/docs/changelog/102426.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102426 -summary: Patterns support for allowed subjects by the JWT realm -area: 
Authentication -type: feature -issues: [] diff --git a/docs/changelog/102428.yaml b/docs/changelog/102428.yaml new file mode 100644 index 0000000000000..275492fa6a888 --- /dev/null +++ b/docs/changelog/102428.yaml @@ -0,0 +1,5 @@ +pr: 102428 +summary: "ESQL: Add option to drop null fields" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/102434.yaml b/docs/changelog/102434.yaml deleted file mode 100644 index ab6aa886c13b1..0000000000000 --- a/docs/changelog/102434.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102434 -summary: "ESQL: Short circuit loading empty doc values" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102447.yaml b/docs/changelog/102447.yaml deleted file mode 100644 index 76823153670bd..0000000000000 --- a/docs/changelog/102447.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102447 -summary: Pass transform source query as `index_filter` to `open_point_in_time` request -area: Transform -type: enhancement -issues: - - 101049 diff --git a/docs/changelog/102456.yaml b/docs/changelog/102456.yaml deleted file mode 100644 index 6ef3b8f16f53c..0000000000000 --- a/docs/changelog/102456.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102456 -summary: Switch logs data streams to search all fields by default -area: Data streams -type: enhancement -issues: - - 99872 diff --git a/docs/changelog/102461.yaml b/docs/changelog/102461.yaml deleted file mode 100644 index c0c07554ed21f..0000000000000 --- a/docs/changelog/102461.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102461 -summary: Enable concurrency for scripted metric agg -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/102462.yaml b/docs/changelog/102462.yaml deleted file mode 100644 index d44ccc4cbbc5c..0000000000000 --- a/docs/changelog/102462.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102462 -summary: Check the real memory circuit breaker when building global ordinals -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/102472.yaml 
b/docs/changelog/102472.yaml deleted file mode 100644 index b0f5bfc714643..0000000000000 --- a/docs/changelog/102472.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102472 -summary: Expose the `invalidation` field in Get/Query `ApiKey` APIs -area: Security -type: enhancement -issues: [ ] diff --git a/docs/changelog/102476.yaml b/docs/changelog/102476.yaml deleted file mode 100644 index a53a20ecfec20..0000000000000 --- a/docs/changelog/102476.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102476 -summary: Unwrap `ExecutionException` when loading from cache in `AbstractIndexOrdinalsFieldData` -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/102490.yaml b/docs/changelog/102490.yaml deleted file mode 100644 index 8ff554ab0f0fe..0000000000000 --- a/docs/changelog/102490.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102490 -summary: "ESQL: Load text field from parent keyword field" -area: ES|QL -type: enhancement -issues: - - 102473 diff --git a/docs/changelog/102495.yaml b/docs/changelog/102495.yaml deleted file mode 100644 index 77ae42f7eebcb..0000000000000 --- a/docs/changelog/102495.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102495 -summary: "Add support for configuring proxy scheme in S3 client settings and EC2 discovery plugin" -area: Distributed -type: enhancement -issues: - - 101873 diff --git a/docs/changelog/102510.yaml b/docs/changelog/102510.yaml deleted file mode 100644 index 2b654b5c85929..0000000000000 --- a/docs/changelog/102510.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 102510 -summary: "ESQL: Make fieldcaps calls lighter" -area: ES|QL -type: enhancement -issues: - - 101763 - - 102393 diff --git a/docs/changelog/102511.yaml b/docs/changelog/102511.yaml deleted file mode 100644 index cf80ca03e197f..0000000000000 --- a/docs/changelog/102511.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102511 -summary: Trigger parent circuit breaker when building scorers in filters aggregation -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/102512.yaml 
b/docs/changelog/102512.yaml deleted file mode 100644 index d4bc765ecaf5f..0000000000000 --- a/docs/changelog/102512.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102512 -summary: Implement exponential backoff for transform state persistence retrying -area: Transform -type: enhancement -issues: - - 102528 diff --git a/docs/changelog/102562.yaml b/docs/changelog/102562.yaml deleted file mode 100644 index a4b4f5a095118..0000000000000 --- a/docs/changelog/102562.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102562 -summary: Track blocks of intermediate state of aggs -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102570.yaml b/docs/changelog/102570.yaml deleted file mode 100644 index 2d3f878dbbb27..0000000000000 --- a/docs/changelog/102570.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102570 -summary: Added `beat.stats.libbeat.pipeline.queue.max_events` -area: Monitoring -type: enhancement -issues: [] diff --git a/docs/changelog/102571.yaml b/docs/changelog/102571.yaml deleted file mode 100644 index 25272408161db..0000000000000 --- a/docs/changelog/102571.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102571 -summary: Allow executing multiple periodic flushes while they are being made durable -area: Store -type: enhancement -issues: [] diff --git a/docs/changelog/102598.yaml b/docs/changelog/102598.yaml deleted file mode 100644 index c32519acdf6d1..0000000000000 --- a/docs/changelog/102598.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102598 -summary: Add apm api for asynchronous counters (always increasing) -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/102602.yaml b/docs/changelog/102602.yaml deleted file mode 100644 index dd01eaa98b214..0000000000000 --- a/docs/changelog/102602.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102602 -summary: Consider search context missing exceptions as recoverable -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/102612.yaml b/docs/changelog/102612.yaml deleted file mode 100644 index 
60808ae72801a..0000000000000 --- a/docs/changelog/102612.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102612 -summary: Track blocks when hashing single multi-valued field -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102636.yaml b/docs/changelog/102636.yaml deleted file mode 100644 index 8b32e0568b0fb..0000000000000 --- a/docs/changelog/102636.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102636 -summary: Revert non-semantic `NodeInfo` -area: Infra/Core -type: regression -issues: [] diff --git a/docs/changelog/102637.yaml b/docs/changelog/102637.yaml deleted file mode 100644 index 4d5d689934bd6..0000000000000 --- a/docs/changelog/102637.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102637 -summary: Improve stability of spike and dip detection for the change point aggregation -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102644.yaml b/docs/changelog/102644.yaml deleted file mode 100644 index 17c5cbebed7cc..0000000000000 --- a/docs/changelog/102644.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102644 -summary: Disable parallelism for composite agg against high cardinality fields -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/102673.yaml b/docs/changelog/102673.yaml deleted file mode 100644 index 16546edb3cf3c..0000000000000 --- a/docs/changelog/102673.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102673 -summary: "ESQL: Share constant null Blocks" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102680.yaml b/docs/changelog/102680.yaml deleted file mode 100644 index 8b32c5029ea2a..0000000000000 --- a/docs/changelog/102680.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102680 -summary: Make `api_key.delete.interval` a dynamic setting -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/102682.yaml b/docs/changelog/102682.yaml deleted file mode 100644 index 190ff3df5a7f6..0000000000000 --- a/docs/changelog/102682.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102682 
-summary: Introduce fielddata cache ttl -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/102710.yaml b/docs/changelog/102710.yaml deleted file mode 100644 index ee805c70180a0..0000000000000 --- a/docs/changelog/102710.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102710 -summary: Enable concurrency for multi terms agg -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/102713.yaml b/docs/changelog/102713.yaml deleted file mode 100644 index 278d7d4ffb129..0000000000000 --- a/docs/changelog/102713.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102713 -summary: "ESQL: Add `profile` option" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102727.yaml b/docs/changelog/102727.yaml deleted file mode 100644 index 4f4d4fbf48899..0000000000000 --- a/docs/changelog/102727.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102727 -summary: "ESQL: Load stored fields sequentially" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102731.yaml b/docs/changelog/102731.yaml deleted file mode 100644 index a12e04bfab078..0000000000000 --- a/docs/changelog/102731.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102731 -summary: Add internal inference action for ml models an services -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102735.yaml b/docs/changelog/102735.yaml deleted file mode 100644 index 4726e08d1f314..0000000000000 --- a/docs/changelog/102735.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102735 -summary: "[Profiling] Report in status API if docs exist" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/102740.yaml b/docs/changelog/102740.yaml deleted file mode 100644 index b7fc10eb19ddb..0000000000000 --- a/docs/changelog/102740.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102740 -summary: "[Profiling] Notify early about task cancellation" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/102767.yaml 
b/docs/changelog/102767.yaml deleted file mode 100644 index cf1edeeb51265..0000000000000 --- a/docs/changelog/102767.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102767 -summary: "ESQL: remove `time_zone` request parameter" -area: ES|QL -type: bug -issues: - - 102159 diff --git a/docs/changelog/102806.yaml b/docs/changelog/102806.yaml deleted file mode 100644 index faa971ec1d879..0000000000000 --- a/docs/changelog/102806.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102806 -summary: Support for GET all models and by task type in the `_inference` API -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102808.yaml b/docs/changelog/102808.yaml deleted file mode 100644 index 4e3df80a28319..0000000000000 --- a/docs/changelog/102808.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102808 -summary: Active shards message corrected for search shards -area: Distributed -type: bug -issues: - - 101896 diff --git a/docs/changelog/102810.yaml b/docs/changelog/102810.yaml deleted file mode 100644 index f5faf7a321dbc..0000000000000 --- a/docs/changelog/102810.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102810 -summary: Add memory utilization Kibana metric to the monitoring index templates -area: Monitoring -type: enhancement -issues: [] diff --git a/docs/changelog/102811.yaml b/docs/changelog/102811.yaml deleted file mode 100644 index 039a337a53e87..0000000000000 --- a/docs/changelog/102811.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102811 -summary: Split comma-separated source index strings into separate indices -area: Transform -type: bug -issues: - - 99564 diff --git a/docs/changelog/102832.yaml b/docs/changelog/102832.yaml deleted file mode 100644 index 7daf22263b2e9..0000000000000 --- a/docs/changelog/102832.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102832 -summary: Disable concurrency for sampler and diversified sampler -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/102840.yaml b/docs/changelog/102840.yaml deleted file mode 100644 index 
1d87cede632c9..0000000000000 --- a/docs/changelog/102840.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102840 -summary: Fail S3 repository analysis on partial reads -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/102843.yaml b/docs/changelog/102843.yaml deleted file mode 100644 index 7e561fa7cc582..0000000000000 --- a/docs/changelog/102843.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102843 -summary: Restore `SharedBytes.IO` refcounting on reads & writes -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/102844.yaml b/docs/changelog/102844.yaml deleted file mode 100644 index d05547c3aa9da..0000000000000 --- a/docs/changelog/102844.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102844 -summary: Skip global ordinals loading if query does not match after rewrite -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/102848.yaml b/docs/changelog/102848.yaml deleted file mode 100644 index 971d91a878579..0000000000000 --- a/docs/changelog/102848.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102848 -summary: Decref `SharedBytes.IO` after read is done not before -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/102877.yaml b/docs/changelog/102877.yaml deleted file mode 100644 index da2de19b19a90..0000000000000 --- a/docs/changelog/102877.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102877 -summary: Add basic telelemetry for the inference feature -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102888.yaml b/docs/changelog/102888.yaml deleted file mode 100644 index 79ea9cbe712de..0000000000000 --- a/docs/changelog/102888.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102888 -summary: "Optimize `_count` type API requests" -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102901.yaml b/docs/changelog/102901.yaml deleted file mode 100644 index ac417691b525c..0000000000000 --- a/docs/changelog/102901.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102901 -summary: 
Introduce local block factory -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102902.yaml b/docs/changelog/102902.yaml deleted file mode 100644 index b33afdd35a603..0000000000000 --- a/docs/changelog/102902.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102902 -summary: Fast path for reading single doc with ordinals -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102906.yaml b/docs/changelog/102906.yaml deleted file mode 100644 index 3efaa2db58390..0000000000000 --- a/docs/changelog/102906.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102906 -summary: Introduce a `StreamOutput` that counts how many bytes are written to the - stream -area: Distributed -type: enhancement -issues: [] diff --git a/docs/changelog/102916.yaml b/docs/changelog/102916.yaml deleted file mode 100644 index 3943f34d91221..0000000000000 --- a/docs/changelog/102916.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102916 -summary: Fix layout for MV_EXPAND -area: ES|QL -type: bug -issues: - - 102912 diff --git a/docs/changelog/102919.yaml b/docs/changelog/102919.yaml deleted file mode 100644 index 0de2e75abc6cf..0000000000000 --- a/docs/changelog/102919.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102919 -summary: Error log when license verification fails locally -area: License -type: bug -issues: [] diff --git a/docs/changelog/102925.yaml b/docs/changelog/102925.yaml deleted file mode 100644 index 5dd15f4f60429..0000000000000 --- a/docs/changelog/102925.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102925 -summary: Add ldap user metadata mappings for full name and email -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/102937.yaml b/docs/changelog/102937.yaml deleted file mode 100644 index 116fbadebe09d..0000000000000 --- a/docs/changelog/102937.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102937 -summary: "ESQL: New telemetry commands" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102944.yaml b/docs/changelog/102944.yaml deleted 
file mode 100644 index 58a1bb8f6bbaa..0000000000000 --- a/docs/changelog/102944.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102944 -summary: "If trained model download task is in progress, wait for it to finish before\ - \ executing start trained model deployment" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/102967.yaml b/docs/changelog/102967.yaml deleted file mode 100644 index cdde735f6c077..0000000000000 --- a/docs/changelog/102967.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102967 -summary: "ES|QL: Improve resolution error management in `mv_expand`" -area: ES|QL -type: bug -issues: - - 102964 diff --git a/docs/changelog/102994.yaml b/docs/changelog/102994.yaml deleted file mode 100644 index c35baaefcb723..0000000000000 --- a/docs/changelog/102994.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102994 -summary: Enable Connectors API as technical preview -area: Application -type: feature -issues: [] diff --git a/docs/changelog/103003.yaml b/docs/changelog/103003.yaml deleted file mode 100644 index accacc2b62416..0000000000000 --- a/docs/changelog/103003.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103003 -summary: "Fix: Watcher REST API `GET /_watcher/settings` now includes product header" -area: "Watcher" -type: bug -issues: - - 102928 diff --git a/docs/changelog/103013.yaml b/docs/changelog/103013.yaml deleted file mode 100644 index bb8eb99088856..0000000000000 --- a/docs/changelog/103013.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103013 -summary: Deprecate the unused `elasticsearch_version` field of enrich policy json -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/103024.yaml b/docs/changelog/103024.yaml deleted file mode 100644 index e860ad056f980..0000000000000 --- a/docs/changelog/103024.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103024 -summary: Fix template simulate setting application ordering -area: Indices APIs -type: bug -issues: - - 103008 diff --git a/docs/changelog/103061.yaml b/docs/changelog/103061.yaml deleted 
file mode 100644 index 558429493ac6f..0000000000000 --- a/docs/changelog/103061.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103061 -summary: "[Profiling] Query in parallel only if beneficial" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103116.yaml b/docs/changelog/103116.yaml deleted file mode 100644 index 402c83e16ec37..0000000000000 --- a/docs/changelog/103116.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103116 -summary: Fix `frequent_item_sets` aggregation on empty index -area: Machine Learning -type: bug -issues: - - 103067 diff --git a/docs/changelog/103124.yaml b/docs/changelog/103124.yaml deleted file mode 100644 index 078c8249bbf5d..0000000000000 --- a/docs/changelog/103124.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103124 -summary: Start a new trace context before loading a trained model -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/103134.yaml b/docs/changelog/103134.yaml new file mode 100644 index 0000000000000..13bb0323645f5 --- /dev/null +++ b/docs/changelog/103134.yaml @@ -0,0 +1,5 @@ +pr: 103134 +summary: CCS with `minimize_roundtrips` performs incremental merges of each `SearchResponse` +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/103150.yaml b/docs/changelog/103150.yaml deleted file mode 100644 index 3f42c882d89fb..0000000000000 --- a/docs/changelog/103150.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103150 -summary: "ES|QL: Fix NPE on single value detection" -area: ES|QL -type: bug -issues: - - 103141 diff --git a/docs/changelog/103151.yaml b/docs/changelog/103151.yaml deleted file mode 100644 index bd9eea97cac6d..0000000000000 --- a/docs/changelog/103151.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103151 -summary: Wrap painless explain error -area: Infra/Scripting -type: bug -issues: - - 103018 diff --git a/docs/changelog/103183.yaml b/docs/changelog/103183.yaml deleted file mode 100644 index cb28033cff6a7..0000000000000 --- a/docs/changelog/103183.yaml +++ /dev/null @@ -1,6 +0,0 @@ 
-pr: 103183 -summary: "[Connectors API] Handle nullable fields correctly in the `ConnectorSyncJob`\ - \ parser" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103185.yaml b/docs/changelog/103185.yaml deleted file mode 100644 index 3a1a4960ba98c..0000000000000 --- a/docs/changelog/103185.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103185 -summary: Fix format string in `OldLuceneVersions` -area: Search -type: bug -issues: [] diff --git a/docs/changelog/103203.yaml b/docs/changelog/103203.yaml deleted file mode 100644 index d2aa3e9961c6a..0000000000000 --- a/docs/changelog/103203.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103203 -summary: Fix NPE & empty result handling in `CountOnlyQueryPhaseResultConsumer` -area: Search -type: bug -issues: [] diff --git a/docs/changelog/103209.yaml b/docs/changelog/103209.yaml deleted file mode 100644 index 05ae8c13bcb5c..0000000000000 --- a/docs/changelog/103209.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103209 -summary: "ESQL: Fix `to_degrees()` returning infinity" -area: ES|QL -type: bug -issues: - - 102987 diff --git a/docs/changelog/103212.yaml b/docs/changelog/103212.yaml deleted file mode 100644 index 3cbbddc8f2229..0000000000000 --- a/docs/changelog/103212.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103212 -summary: Use the eql query filter for the open-pit request -area: EQL -type: enhancement -issues: [] diff --git a/docs/changelog/103251.yaml b/docs/changelog/103251.yaml deleted file mode 100644 index 0c5c6d6e4d776..0000000000000 --- a/docs/changelog/103251.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103251 -summary: Wait for reroute before acking put-shutdown -area: Infra/Node Lifecycle -type: bug -issues: [] diff --git a/docs/changelog/103300.yaml b/docs/changelog/103300.yaml new file mode 100644 index 0000000000000..a536a673b7827 --- /dev/null +++ b/docs/changelog/103300.yaml @@ -0,0 +1,5 @@ +pr: 103300 +summary: Retry indefinitely for s3 indices blob read errors +area: Snapshot/Restore +type: enhancement +issues: 
[] diff --git a/docs/changelog/103339.yaml b/docs/changelog/103339.yaml deleted file mode 100644 index 6ea1ab0cf799a..0000000000000 --- a/docs/changelog/103339.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103339 -summary: "ESQL: Fix resolution of MV_EXPAND after KEEP *" -area: ES|QL -type: bug -issues: - - 103331 diff --git a/docs/changelog/103342.yaml b/docs/changelog/103342.yaml deleted file mode 100644 index 32711d7a6b390..0000000000000 --- a/docs/changelog/103342.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103342 -summary: Use dataset size instead of on-disk size for data stream stats -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/103361.yaml b/docs/changelog/103361.yaml deleted file mode 100644 index 441acc09895ef..0000000000000 --- a/docs/changelog/103361.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103361 -summary: Prevent attempts to access non-existent node information during rebalancing -area: Machine Learning -type: bug -issues: [ ] diff --git a/docs/changelog/103399.yaml b/docs/changelog/103399.yaml new file mode 100644 index 0000000000000..440ac90b313f5 --- /dev/null +++ b/docs/changelog/103399.yaml @@ -0,0 +1,6 @@ +pr: 103399 +summary: "add validation on _id field when upsert new doc" +area: Search +type: bug +issues: + - 102981 diff --git a/docs/changelog/103408.yaml b/docs/changelog/103408.yaml deleted file mode 100644 index bf5081b854f08..0000000000000 --- a/docs/changelog/103408.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103408 -summary: Cache component versions -area: Infra/Core -type: bug -issues: - - 102103 diff --git a/docs/changelog/103427.yaml b/docs/changelog/103427.yaml deleted file mode 100644 index 57a27aa687ab7..0000000000000 --- a/docs/changelog/103427.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103427 -summary: "[Connector API] Fix bug with nullable tooltip field in parser" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103430.yaml b/docs/changelog/103430.yaml deleted file mode 100644 index 
cd2444270849d..0000000000000 --- a/docs/changelog/103430.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103430 -summary: "[Connectors API] Fix bug with missing TEXT `DisplayType` enum" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103435.yaml b/docs/changelog/103435.yaml deleted file mode 100644 index 95e3c7169ada9..0000000000000 --- a/docs/changelog/103435.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103435 -summary: Dispatch `ClusterStateAction#buildResponse` to executor -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/103474.yaml b/docs/changelog/103474.yaml deleted file mode 100644 index a1da15a6bfbe5..0000000000000 --- a/docs/changelog/103474.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103474 -summary: Fix now in millis for ESQL search contexts -area: ES|QL -type: bug -issues: - - 103455 diff --git a/docs/changelog/103508.yaml b/docs/changelog/103508.yaml deleted file mode 100644 index 9c6f79ef75657..0000000000000 --- a/docs/changelog/103508.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103508 -summary: "[Connectors API] Fix `ClassCastException` when creating a new sync job" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103511.yaml b/docs/changelog/103511.yaml new file mode 100644 index 0000000000000..20a48df914832 --- /dev/null +++ b/docs/changelog/103511.yaml @@ -0,0 +1,6 @@ +pr: 103511 +summary: Downsampling supports `date_histogram` with tz +area: Downsampling +type: bug +issues: + - 101309 diff --git a/docs/changelog/103530.yaml b/docs/changelog/103530.yaml deleted file mode 100644 index 6feb04467b03e..0000000000000 --- a/docs/changelog/103530.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103530 -summary: Exclude quantiles when fetching model snapshots where possible -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/103546.yaml b/docs/changelog/103546.yaml deleted file mode 100644 index 08584e8555bd4..0000000000000 --- a/docs/changelog/103546.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103546 
-summary: Handle timeout on standalone rewrite calls -area: Search -type: bug -issues: [] diff --git a/docs/changelog/103574.yaml b/docs/changelog/103574.yaml deleted file mode 100644 index ed6ad237f49a2..0000000000000 --- a/docs/changelog/103574.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103574 -summary: Samples should check if the aggregations result is empty or null -area: EQL -type: bug -issues: [] diff --git a/docs/changelog/103580.yaml b/docs/changelog/103580.yaml deleted file mode 100644 index 6fd0328017d1f..0000000000000 --- a/docs/changelog/103580.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103580 -summary: Copy counter field properties to downsampled index -area: Downsampling -type: bug -issues: - - 103569 diff --git a/docs/changelog/103591.yaml b/docs/changelog/103591.yaml deleted file mode 100644 index 41b6e362c5713..0000000000000 --- a/docs/changelog/103591.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103591 -summary: Wait for the model results on graceful shutdown -area: Machine Learning -type: bug -issues: - - 103414 diff --git a/docs/changelog/103601.yaml b/docs/changelog/103601.yaml deleted file mode 100644 index bf7aaaf835e00..0000000000000 --- a/docs/changelog/103601.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 103601 -summary: Introduce Elasticsearch `PostingFormat` based on Lucene 90 positing format - using PFOR -area: Search -type: bug -issues: - - 103002 diff --git a/docs/changelog/103611.yaml b/docs/changelog/103611.yaml deleted file mode 100644 index 51c77cd286d66..0000000000000 --- a/docs/changelog/103611.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103611 -summary: Fix NPE on missing event queries -area: EQL -type: bug -issues: - - 103608 diff --git a/docs/changelog/103615.yaml b/docs/changelog/103615.yaml deleted file mode 100644 index 69498c749687f..0000000000000 --- a/docs/changelog/103615.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103615 -summary: Fix downsample api by returning a failure in case one or more downsample persistent tasks failed -area: 
Downsampling -type: bug -issues: [] diff --git a/docs/changelog/103627.yaml b/docs/changelog/103627.yaml new file mode 100644 index 0000000000000..4b0d9e937542e --- /dev/null +++ b/docs/changelog/103627.yaml @@ -0,0 +1,5 @@ +pr: 103627 +summary: Add gradle tasks and code to modify and access mappings between version ids and release versions +area: Infra/Core +type: feature +issues: [] diff --git a/docs/changelog/103656.yaml b/docs/changelog/103656.yaml new file mode 100644 index 0000000000000..24bd8814029ff --- /dev/null +++ b/docs/changelog/103656.yaml @@ -0,0 +1,5 @@ +pr: 103656 +summary: "ESQL: add =~ operator (case insensitive equality)" +area: ES|QL +type: feature +issues: [] diff --git a/docs/changelog/103670.yaml b/docs/changelog/103670.yaml deleted file mode 100644 index ad3f0519b5d19..0000000000000 --- a/docs/changelog/103670.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103670 -summary: "ESQL: Improve local folding of aggregates" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/103682.yaml b/docs/changelog/103682.yaml new file mode 100644 index 0000000000000..109e77dd053a5 --- /dev/null +++ b/docs/changelog/103682.yaml @@ -0,0 +1,6 @@ +pr: 103682 +summary: Use deduced mappings for determining proper fields' format even if `deduce_mappings==false` +area: Transform +type: bug +issues: + - 103115 diff --git a/docs/changelog/103690.yaml b/docs/changelog/103690.yaml deleted file mode 100644 index fa9076789c1cd..0000000000000 --- a/docs/changelog/103690.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103690 -summary: Restore inter-segment search concurrency with synthetic source is enabled -area: Search -type: bug -issues: [] diff --git a/docs/changelog/103741.yaml b/docs/changelog/103741.yaml new file mode 100644 index 0000000000000..6771ddd329f46 --- /dev/null +++ b/docs/changelog/103741.yaml @@ -0,0 +1,5 @@ +pr: 103741 +summary: Limit nesting depth in Exception XContent +area: Infra/Resiliency +type: bug +issues: [] diff --git a/docs/changelog/103758.yaml 
b/docs/changelog/103758.yaml deleted file mode 100644 index e77f228f134a0..0000000000000 --- a/docs/changelog/103758.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103758 -summary: Fix the transport version of `PlanStreamOutput` -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/103763.yaml b/docs/changelog/103763.yaml new file mode 100644 index 0000000000000..e4d6556c77077 --- /dev/null +++ b/docs/changelog/103763.yaml @@ -0,0 +1,6 @@ +pr: 103763 +summary: Ref count search response bytes +area: Search +type: enhancement +issues: + - 102657 diff --git a/docs/changelog/103817.yaml b/docs/changelog/103817.yaml new file mode 100644 index 0000000000000..ff8978f1d3776 --- /dev/null +++ b/docs/changelog/103817.yaml @@ -0,0 +1,6 @@ +pr: 103817 +summary: Fix deleting index during snapshot finalization +area: Snapshot/Restore +type: bug +issues: + - 101029 diff --git a/docs/changelog/103819.yaml b/docs/changelog/103819.yaml new file mode 100644 index 0000000000000..ef6e717572cc5 --- /dev/null +++ b/docs/changelog/103819.yaml @@ -0,0 +1,5 @@ +pr: 103819 +summary: Add retry logic for 500 and 503 errors for OpenAI +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/103865.yaml b/docs/changelog/103865.yaml deleted file mode 100644 index 5c9570f32c44e..0000000000000 --- a/docs/changelog/103865.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103865 -summary: Revert change -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/103873.yaml b/docs/changelog/103873.yaml deleted file mode 100644 index 937106043ecf4..0000000000000 --- a/docs/changelog/103873.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103873 -summary: Catch exceptions during `pytorch_inference` startup -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/103923.yaml b/docs/changelog/103923.yaml deleted file mode 100644 index 80e6880909f3a..0000000000000 --- a/docs/changelog/103923.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103923 -summary: Preserve response headers in 
Datafeed preview -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/103949.yaml b/docs/changelog/103949.yaml new file mode 100644 index 0000000000000..96bd76d89ceae --- /dev/null +++ b/docs/changelog/103949.yaml @@ -0,0 +1,5 @@ +pr: 103949 +summary: "ESQL: Introduce mode setting for ENRICH" +area: ES|QL +type: feature +issues: [] diff --git a/docs/changelog/103959.yaml b/docs/changelog/103959.yaml new file mode 100644 index 0000000000000..4c8b4413b95f8 --- /dev/null +++ b/docs/changelog/103959.yaml @@ -0,0 +1,5 @@ +pr: 103959 +summary: Add `ApiKey` expiration time to audit log +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/104006.yaml b/docs/changelog/104006.yaml new file mode 100644 index 0000000000000..d840502cdefbe --- /dev/null +++ b/docs/changelog/104006.yaml @@ -0,0 +1,5 @@ +pr: 104006 +summary: Add support for more than one `inner_hit` when searching nested vectors +area: Vector Search +type: enhancement +issues: [] diff --git a/docs/changelog/104026.yaml b/docs/changelog/104026.yaml new file mode 100644 index 0000000000000..d9aa704de1dbd --- /dev/null +++ b/docs/changelog/104026.yaml @@ -0,0 +1,5 @@ +pr: 104026 +summary: Include user's privileges actions in IdP plugin `_has_privileges` request +area: IdentityProvider +type: enhancement +issues: [] diff --git a/docs/changelog/104029.yaml b/docs/changelog/104029.yaml deleted file mode 100644 index 2b74d3b634dba..0000000000000 --- a/docs/changelog/104029.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104029 -summary: '`AsyncOperator#isFinished` must never return true on failure' -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/104033.yaml b/docs/changelog/104033.yaml new file mode 100644 index 0000000000000..d3e167665732c --- /dev/null +++ b/docs/changelog/104033.yaml @@ -0,0 +1,5 @@ +pr: 104033 +summary: Add Query Users API +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/104046.yaml b/docs/changelog/104046.yaml deleted file 
mode 100644 index 9b383611b560a..0000000000000 --- a/docs/changelog/104046.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104046 -summary: "ESQL: Update the use of some user-caused exceptions" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/104051.yaml b/docs/changelog/104051.yaml deleted file mode 100644 index 1aa6d69f5ae20..0000000000000 --- a/docs/changelog/104051.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104051 -summary: Fix NPE that is thrown by `_update` API -area: Transform -type: bug -issues: - - 104048 diff --git a/docs/changelog/104087.yaml b/docs/changelog/104087.yaml new file mode 100644 index 0000000000000..614e2d0de7e58 --- /dev/null +++ b/docs/changelog/104087.yaml @@ -0,0 +1,13 @@ +pr: 104087 +summary: Deprecate machine learning on Intel macOS +area: Machine Learning +type: deprecation +issues: [] +deprecation: + title: Deprecate machine learning on Intel macOS + area: Packaging + details: The machine learning plugin will be permanently disabled on macOS x86_64 + in new minor versions released from December 2024 onwards. + impact: To continue to use machine learning functionality on macOS please switch to + an arm64 machine (Apple silicon). Alternatively, it will still be possible to run + Elasticsearch with machine learning enabled in a Docker container on macOS x86_64. 
diff --git a/docs/changelog/104092.yaml b/docs/changelog/104092.yaml new file mode 100644 index 0000000000000..b40637d51765e --- /dev/null +++ b/docs/changelog/104092.yaml @@ -0,0 +1,5 @@ +pr: 104092 +summary: Ingest geoip processor cache 'no results' from the database +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/104122.yaml b/docs/changelog/104122.yaml new file mode 100644 index 0000000000000..a88d7499bd44e --- /dev/null +++ b/docs/changelog/104122.yaml @@ -0,0 +1,5 @@ +pr: 104122 +summary: Consider currently refreshing data in the memory usage of refresh +area: Engine +type: bug +issues: [] diff --git a/docs/changelog/104132.yaml b/docs/changelog/104132.yaml new file mode 100644 index 0000000000000..87fe94ddcfcea --- /dev/null +++ b/docs/changelog/104132.yaml @@ -0,0 +1,5 @@ +pr: 104132 +summary: Add support for the `simple_query_string` to the Query API Key API +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/104142.yaml b/docs/changelog/104142.yaml new file mode 100644 index 0000000000000..08bf9ef759090 --- /dev/null +++ b/docs/changelog/104142.yaml @@ -0,0 +1,5 @@ +pr: 104142 +summary: Expose token authentication metrics +area: Authentication +type: enhancement +issues: [] diff --git a/docs/changelog/104145.yaml b/docs/changelog/104145.yaml new file mode 100644 index 0000000000000..41dd1f97ebe8b --- /dev/null +++ b/docs/changelog/104145.yaml @@ -0,0 +1,6 @@ +pr: 104145 +summary: Fix _alias/ returning non-matching data streams +area: Data streams +type: bug +issues: + - 96589 diff --git a/docs/changelog/104150.yaml b/docs/changelog/104150.yaml new file mode 100644 index 0000000000000..c910542dcf7f6 --- /dev/null +++ b/docs/changelog/104150.yaml @@ -0,0 +1,5 @@ +pr: 104150 +summary: Correct profiled rewrite time for knn with a pre-filter +area: Search +type: bug +issues: [] diff --git a/docs/changelog/104155.yaml b/docs/changelog/104155.yaml new file mode 100644 index 0000000000000..04d6a9920310a --- 
/dev/null +++ b/docs/changelog/104155.yaml @@ -0,0 +1,6 @@ +pr: 104155 +summary: "Updated `missingTrainedModel` message to include: you may need to create\ + \ it" +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/104182.yaml b/docs/changelog/104182.yaml new file mode 100644 index 0000000000000..b5cf10f941cc6 --- /dev/null +++ b/docs/changelog/104182.yaml @@ -0,0 +1,5 @@ +pr: 104182 +summary: "Apm-data: fix `@custom` component templates" +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/104198.yaml b/docs/changelog/104198.yaml new file mode 100644 index 0000000000000..0b5b4680c2d88 --- /dev/null +++ b/docs/changelog/104198.yaml @@ -0,0 +1,5 @@ +pr: 104198 +summary: "[Connector API] Fix bug in configuration validation parser" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/104200.yaml b/docs/changelog/104200.yaml new file mode 100644 index 0000000000000..bc2aa2507f0ec --- /dev/null +++ b/docs/changelog/104200.yaml @@ -0,0 +1,5 @@ +pr: 104200 +summary: Expose realms authentication metrics +area: Authentication +type: enhancement +issues: [] diff --git a/docs/changelog/104209.yaml b/docs/changelog/104209.yaml new file mode 100644 index 0000000000000..fabf06fb99c2e --- /dev/null +++ b/docs/changelog/104209.yaml @@ -0,0 +1,13 @@ +pr: 104209 +summary: '`DesiredNode:` deprecate `node_version` field and make it optional (unused) + in current parser' +area: Distributed +type: deprecation +issues: [] +deprecation: + title: '`DesiredNode:` deprecate `node_version` field and make it optional for the current version' + area: REST API + details: The desired_node API includes a `node_version` field to perform validation on the new node version required. + This kind of check is too broad, and it's better done by external logic, so it has been removed, making the + `node_version` field not necessary. The field will be removed in a later version. 
+ impact: Users should update their usages of `desired_node` to not include the `node_version` field anymore. diff --git a/docs/changelog/104218.yaml b/docs/changelog/104218.yaml new file mode 100644 index 0000000000000..b3051008dc47b --- /dev/null +++ b/docs/changelog/104218.yaml @@ -0,0 +1,6 @@ +pr: 104218 +summary: "Support ST_CENTROID over spatial points" +area: "ES|QL" +type: enhancement +issues: + - 104656 diff --git a/docs/changelog/104230.yaml b/docs/changelog/104230.yaml new file mode 100644 index 0000000000000..94184f64586f5 --- /dev/null +++ b/docs/changelog/104230.yaml @@ -0,0 +1,5 @@ +pr: 104230 +summary: Undeploy elser when inference model deleted +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/104265.yaml b/docs/changelog/104265.yaml new file mode 100644 index 0000000000000..88c3d72ee81d0 --- /dev/null +++ b/docs/changelog/104265.yaml @@ -0,0 +1,6 @@ +pr: 104265 +summary: Remove `hashCode` and `equals` from `OperationModeUpdateTask` +area: ILM+SLM +type: bug +issues: + - 100871 diff --git a/docs/changelog/104269.yaml b/docs/changelog/104269.yaml new file mode 100644 index 0000000000000..8d4b0fc5d5198 --- /dev/null +++ b/docs/changelog/104269.yaml @@ -0,0 +1,5 @@ +pr: 104269 +summary: "ESQL: Support loading shapes from source into WKB blocks" +area: "ES|QL" +type: enhancement +issues: [] diff --git a/docs/changelog/104281.yaml b/docs/changelog/104281.yaml new file mode 100644 index 0000000000000..087e91d83ab3b --- /dev/null +++ b/docs/changelog/104281.yaml @@ -0,0 +1,5 @@ +pr: 104281 +summary: Data streams fix failure store delete +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/104288.yaml b/docs/changelog/104288.yaml new file mode 100644 index 0000000000000..67f54e37cf9dc --- /dev/null +++ b/docs/changelog/104288.yaml @@ -0,0 +1,6 @@ +pr: 104288 +summary: Don't throw error for remote shards that open PIT filtered out +area: Search +type: bug +issues: + - 102596 diff --git a/docs/changelog/104289.yaml 
b/docs/changelog/104289.yaml new file mode 100644 index 0000000000000..9df8f8ecd4add --- /dev/null +++ b/docs/changelog/104289.yaml @@ -0,0 +1,6 @@ +pr: 104289 +summary: Better handling of async processor failures +area: Ingest Node +type: bug +issues: + - 101921 diff --git a/docs/changelog/104309.yaml b/docs/changelog/104309.yaml new file mode 100644 index 0000000000000..4467eb6722afc --- /dev/null +++ b/docs/changelog/104309.yaml @@ -0,0 +1,5 @@ +pr: 104309 +summary: "ESQL: Add TO_UPPER and TO_LOWER functions" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/104314.yaml b/docs/changelog/104314.yaml new file mode 100644 index 0000000000000..a17e810a2c023 --- /dev/null +++ b/docs/changelog/104314.yaml @@ -0,0 +1,5 @@ +pr: 104314 +summary: "[LTR] `FieldValueExtrator` - Checking if fetched values is empty" +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/104334.yaml b/docs/changelog/104334.yaml new file mode 100644 index 0000000000000..ff242ee15141b --- /dev/null +++ b/docs/changelog/104334.yaml @@ -0,0 +1,5 @@ +pr: 104334 +summary: Automatically download the ELSER model when PUT in `_inference` +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/104355.yaml b/docs/changelog/104355.yaml new file mode 100644 index 0000000000000..2a100faf3c35f --- /dev/null +++ b/docs/changelog/104355.yaml @@ -0,0 +1,5 @@ +pr: 104355 +summary: Prepare enrich plan to support multi clusters +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/104356.yaml b/docs/changelog/104356.yaml new file mode 100644 index 0000000000000..e0cb2311fbfc9 --- /dev/null +++ b/docs/changelog/104356.yaml @@ -0,0 +1,5 @@ +pr: 104356 +summary: "[Profiling] Extract properties faster from source" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/104386.yaml b/docs/changelog/104386.yaml new file mode 100644 index 0000000000000..41b6a17424bbd --- /dev/null +++ b/docs/changelog/104386.yaml 
@@ -0,0 +1,6 @@ +pr: 104386 +summary: "X-pack/plugin/apm-data: add dynamic setting for enabling template registry" +area: Data streams +type: enhancement +issues: + - 104385 diff --git a/docs/changelog/104387.yaml b/docs/changelog/104387.yaml new file mode 100644 index 0000000000000..f10084d8c4b32 --- /dev/null +++ b/docs/changelog/104387.yaml @@ -0,0 +1,6 @@ +pr: 104387 +summary: "ESQL: Nested expressions inside stats command" +area: ES|QL +type: enhancement +issues: + - 99828 diff --git a/docs/changelog/104394.yaml b/docs/changelog/104394.yaml new file mode 100644 index 0000000000000..39fbfc0c4ea28 --- /dev/null +++ b/docs/changelog/104394.yaml @@ -0,0 +1,5 @@ +pr: 104394 +summary: Endpoint to find positions of Grok pattern matches +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/104396.yaml b/docs/changelog/104396.yaml new file mode 100644 index 0000000000000..586fdc1b22624 --- /dev/null +++ b/docs/changelog/104396.yaml @@ -0,0 +1,5 @@ +pr: 104396 +summary: Report current master in `PeerFinder` +area: Cluster Coordination +type: enhancement +issues: [] diff --git a/docs/changelog/104406.yaml b/docs/changelog/104406.yaml new file mode 100644 index 0000000000000..d26ef664abc07 --- /dev/null +++ b/docs/changelog/104406.yaml @@ -0,0 +1,5 @@ +pr: 104406 +summary: Support patch transport version from 8.12 +area: Downsampling +type: enhancement +issues: [] diff --git a/docs/changelog/104407.yaml b/docs/changelog/104407.yaml new file mode 100644 index 0000000000000..1ce6b6f97f580 --- /dev/null +++ b/docs/changelog/104407.yaml @@ -0,0 +1,6 @@ +pr: 104407 +summary: Set read timeout for fetching IMDSv2 token +area: Discovery-Plugins +type: enhancement +issues: + - 104244 diff --git a/docs/changelog/104408.yaml b/docs/changelog/104408.yaml new file mode 100644 index 0000000000000..7303740168ea5 --- /dev/null +++ b/docs/changelog/104408.yaml @@ -0,0 +1,5 @@ +pr: 104408 +summary: Move `TransportTermsEnumAction` coordination off transport 
threads +area: Search +type: bug +issues: [] diff --git a/docs/changelog/104418.yaml b/docs/changelog/104418.yaml new file mode 100644 index 0000000000000..d27b66cebea87 --- /dev/null +++ b/docs/changelog/104418.yaml @@ -0,0 +1,6 @@ +pr: 104418 +summary: Fix `routing_path` when template has multiple `path_match` and multi-fields +area: TSDB +type: bug +issues: + - 104400 diff --git a/docs/changelog/104433.yaml b/docs/changelog/104433.yaml new file mode 100644 index 0000000000000..b3b292923e290 --- /dev/null +++ b/docs/changelog/104433.yaml @@ -0,0 +1,5 @@ +pr: 104433 +summary: Added 3 automatic restarts for `pytorch_inference` processes which stop unexpectedly +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/104460.yaml b/docs/changelog/104460.yaml new file mode 100644 index 0000000000000..c92acdd5cb8ad --- /dev/null +++ b/docs/changelog/104460.yaml @@ -0,0 +1,5 @@ +pr: 104460 +summary: Dyamically adjust node metrics cache expire +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/104500.yaml b/docs/changelog/104500.yaml new file mode 100644 index 0000000000000..61c45c6dde3cb --- /dev/null +++ b/docs/changelog/104500.yaml @@ -0,0 +1,5 @@ +pr: 104500 +summary: Thread pool metrics +area: Infra/Core +type: enhancement +issues: [] diff --git a/docs/changelog/104505.yaml b/docs/changelog/104505.yaml new file mode 100644 index 0000000000000..4d0c482a88d85 --- /dev/null +++ b/docs/changelog/104505.yaml @@ -0,0 +1,5 @@ +pr: 104505 +summary: "Revert \"x-pack/plugin/apm-data: download geoip DB on pipeline creation\"" +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/104523.yaml b/docs/changelog/104523.yaml new file mode 100644 index 0000000000000..d9e7d207dc23a --- /dev/null +++ b/docs/changelog/104523.yaml @@ -0,0 +1,5 @@ +pr: 104523 +summary: "ESQL: Allow grouping by null blocks" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/104553.yaml b/docs/changelog/104553.yaml new file mode 
100644 index 0000000000000..e1f5c974bd74e --- /dev/null +++ b/docs/changelog/104553.yaml @@ -0,0 +1,5 @@ +pr: 104553 +summary: "ESQL: Fix a bug loading unindexed text fields" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/104559.yaml b/docs/changelog/104559.yaml new file mode 100644 index 0000000000000..d6d030783c4cc --- /dev/null +++ b/docs/changelog/104559.yaml @@ -0,0 +1,5 @@ +pr: 104559 +summary: Adding support for Cohere inference service +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/104573.yaml b/docs/changelog/104573.yaml new file mode 100644 index 0000000000000..a333bc3024772 --- /dev/null +++ b/docs/changelog/104573.yaml @@ -0,0 +1,5 @@ +pr: 104573 +summary: Fix logger Strings.format calls +area: Distributed +type: bug +issues: [] diff --git a/docs/changelog/104575.yaml b/docs/changelog/104575.yaml new file mode 100644 index 0000000000000..ba17b705fca10 --- /dev/null +++ b/docs/changelog/104575.yaml @@ -0,0 +1,5 @@ +pr: 104575 +summary: Introduce Alias.unwrap method +area: "Query Languages" +type: enhancement +issues: [] diff --git a/docs/changelog/104581.yaml b/docs/changelog/104581.yaml new file mode 100644 index 0000000000000..5f9b71acbfed7 --- /dev/null +++ b/docs/changelog/104581.yaml @@ -0,0 +1,6 @@ +pr: 104581 +summary: Fix bogus assertion tripped by force-executed tasks +area: Infra/Core +type: bug +issues: + - 104580 diff --git a/docs/changelog/104585.yaml b/docs/changelog/104585.yaml new file mode 100644 index 0000000000000..8c2b20fe54d0c --- /dev/null +++ b/docs/changelog/104585.yaml @@ -0,0 +1,6 @@ +pr: 104585 +summary: Ingest correctly handle upsert operations and drop processors together +area: Ingest Node +type: bug +issues: + - 36746 diff --git a/docs/changelog/104586.yaml b/docs/changelog/104586.yaml new file mode 100644 index 0000000000000..db1d01c22eff6 --- /dev/null +++ b/docs/changelog/104586.yaml @@ -0,0 +1,6 @@ +pr: 104586 +summary: Reduce the number of Evals 
`ReplaceMissingFieldWithNull` creates +area: ES|QL +type: bug +issues: + - 104583 diff --git a/docs/changelog/104591.yaml b/docs/changelog/104591.yaml new file mode 100644 index 0000000000000..0bd054385753f --- /dev/null +++ b/docs/changelog/104591.yaml @@ -0,0 +1,5 @@ +pr: 104591 +summary: Avoid execute ESQL planning on refresh thread +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/104600.yaml b/docs/changelog/104600.yaml new file mode 100644 index 0000000000000..5337116ba37bc --- /dev/null +++ b/docs/changelog/104600.yaml @@ -0,0 +1,5 @@ +pr: 104600 +summary: "[Profiling] Query in parallel on content nodes" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/104606.yaml b/docs/changelog/104606.yaml new file mode 100644 index 0000000000000..f419c21e0a17d --- /dev/null +++ b/docs/changelog/104606.yaml @@ -0,0 +1,6 @@ +pr: 104606 +summary: Fix bug when `latest` transform is used together with `from` parameter +area: Transform +type: bug +issues: + - 104543 diff --git a/docs/changelog/104643.yaml b/docs/changelog/104643.yaml new file mode 100644 index 0000000000000..5a09cd081b376 --- /dev/null +++ b/docs/changelog/104643.yaml @@ -0,0 +1,5 @@ +pr: 104643 +summary: "[Connectors API] Implement update service type action" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/104654.yaml b/docs/changelog/104654.yaml new file mode 100644 index 0000000000000..1d007ad39a854 --- /dev/null +++ b/docs/changelog/104654.yaml @@ -0,0 +1,5 @@ +pr: 104654 +summary: "[Connectors API] Implement update native action endpoint" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/104666.yaml b/docs/changelog/104666.yaml new file mode 100644 index 0000000000000..5009052bd5b0a --- /dev/null +++ b/docs/changelog/104666.yaml @@ -0,0 +1,5 @@ +pr: 104666 +summary: Require the name field for `inner_hits` for collapse +area: Search +type: bug +issues: [] diff --git a/docs/changelog/104674.yaml 
b/docs/changelog/104674.yaml new file mode 100644 index 0000000000000..12951488f89ce --- /dev/null +++ b/docs/changelog/104674.yaml @@ -0,0 +1,5 @@ +pr: 104674 +summary: "[Profiling] Speed up processing of stacktraces" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/104718.yaml b/docs/changelog/104718.yaml new file mode 100644 index 0000000000000..ffe889bb28a3e --- /dev/null +++ b/docs/changelog/104718.yaml @@ -0,0 +1,6 @@ +pr: 104718 +summary: "ESQL: Fix replacement of nested expressions in aggs with multiple parameters" +area: ES|QL +type: bug +issues: + - 104706 diff --git a/docs/changelog/104721.yaml b/docs/changelog/104721.yaml new file mode 100644 index 0000000000000..3bfe8a21646c8 --- /dev/null +++ b/docs/changelog/104721.yaml @@ -0,0 +1,6 @@ +pr: 104721 +summary: Add default rollover conditions to ILM explain API response +area: ILM+SLM +type: enhancement +issues: + - 103395 diff --git a/docs/changelog/104722.yaml b/docs/changelog/104722.yaml new file mode 100644 index 0000000000000..ed9f2d41ff908 --- /dev/null +++ b/docs/changelog/104722.yaml @@ -0,0 +1,6 @@ +pr: 104722 +summary: Avoid possible datafeed infinite loop with filtering aggregations +area: Machine Learning +type: bug +issues: + - 104699 diff --git a/docs/changelog/104730.yaml b/docs/changelog/104730.yaml new file mode 100644 index 0000000000000..fe5e2e157a004 --- /dev/null +++ b/docs/changelog/104730.yaml @@ -0,0 +1,5 @@ +pr: 104730 +summary: "[Profiling] Support downsampling of generic events" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/104753.yaml b/docs/changelog/104753.yaml new file mode 100644 index 0000000000000..f95fd3da44084 --- /dev/null +++ b/docs/changelog/104753.yaml @@ -0,0 +1,5 @@ +pr: 104753 +summary: Upgrade to Lucene 9.9.2 +area: Search +type: upgrade +issues: [] diff --git a/docs/changelog/104778.yaml b/docs/changelog/104778.yaml new file mode 100644 index 0000000000000..7dae338efc09c --- /dev/null +++ 
b/docs/changelog/104778.yaml @@ -0,0 +1,5 @@ +pr: 104778 +summary: Adding a `RequestBuilder` interface +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/104784.yaml b/docs/changelog/104784.yaml new file mode 100644 index 0000000000000..3d60222c2aa19 --- /dev/null +++ b/docs/changelog/104784.yaml @@ -0,0 +1,5 @@ +pr: 104784 +summary: "Fix blob cache race, decay, time dependency" +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/changelog/104787.yaml b/docs/changelog/104787.yaml new file mode 100644 index 0000000000000..9c4ce688ce6ad --- /dev/null +++ b/docs/changelog/104787.yaml @@ -0,0 +1,5 @@ +pr: 104787 +summary: Add troubleshooting docs link to `PeerFinder` logs +area: Cluster Coordination +type: enhancement +issues: [] diff --git a/docs/changelog/104802.yaml b/docs/changelog/104802.yaml new file mode 100644 index 0000000000000..d535318043ca2 --- /dev/null +++ b/docs/changelog/104802.yaml @@ -0,0 +1,5 @@ +pr: 104802 +summary: "[Connectors API] Fix bug when triggering a sync job via API" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/104808.yaml b/docs/changelog/104808.yaml new file mode 100644 index 0000000000000..7682db085c7a9 --- /dev/null +++ b/docs/changelog/104808.yaml @@ -0,0 +1,5 @@ +pr: 104808 +summary: Fix lost headers with chunked responses +area: Network +type: bug +issues: [] diff --git a/docs/changelog/104840.yaml b/docs/changelog/104840.yaml new file mode 100644 index 0000000000000..5b7d83a966dbc --- /dev/null +++ b/docs/changelog/104840.yaml @@ -0,0 +1,5 @@ +pr: 104840 +summary: Support enrich ANY mode in cross clusters query +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/104870.yaml b/docs/changelog/104870.yaml new file mode 100644 index 0000000000000..65bc9a964eb3e --- /dev/null +++ b/docs/changelog/104870.yaml @@ -0,0 +1,7 @@ +pr: 104870 +summary: Make `_reset` action stop transforms without force first +area: Transform +type: bug +issues: + - 
100596 + - 104825 diff --git a/docs/changelog/104893.yaml b/docs/changelog/104893.yaml new file mode 100644 index 0000000000000..e4685e160f8f8 --- /dev/null +++ b/docs/changelog/104893.yaml @@ -0,0 +1,5 @@ +pr: 104893 +summary: Release resources in `BestBucketsDeferringCollector` earlier +area: Aggregations +type: enhancement +issues: [] diff --git a/docs/changelog/104904.yaml b/docs/changelog/104904.yaml new file mode 100644 index 0000000000000..07e22feb144ed --- /dev/null +++ b/docs/changelog/104904.yaml @@ -0,0 +1,5 @@ +pr: 104904 +summary: Improve `CANNOT_REBALANCE_CAN_ALLOCATE` explanation +area: Allocation +type: bug +issues: [] diff --git a/docs/changelog/96968.yaml b/docs/changelog/96968.yaml deleted file mode 100644 index 8cc6d4ac4c284..0000000000000 --- a/docs/changelog/96968.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 96968 -summary: Allow prefix index naming while reindexing from remote -area: Reindex -type: bug -issues: - - 89120 diff --git a/docs/changelog/98874.yaml b/docs/changelog/98874.yaml deleted file mode 100644 index e3eb7b5acc63f..0000000000000 --- a/docs/changelog/98874.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98874 -summary: Estimate the memory required to deploy trained models more accurately -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/98882.yaml b/docs/changelog/98882.yaml deleted file mode 100644 index 9867f098cfd13..0000000000000 --- a/docs/changelog/98882.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99983 -summary: Use non-deprecated SAML callback URL in tests -area: Authorization -type: enhancement -issues: - - 99985 diff --git a/docs/changelog/98883.yaml b/docs/changelog/98883.yaml deleted file mode 100644 index a8525a432d142..0000000000000 --- a/docs/changelog/98883.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99983 -summary: Use non-deprecated SAML callback URL in SAML smoketests -area: Authorization -type: enhancement -issues: - - 99986 diff --git a/docs/changelog/98916.yaml b/docs/changelog/98916.yaml 
deleted file mode 100644 index a466e3deba009..0000000000000 --- a/docs/changelog/98916.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98916 -summary: Make knn search a query -area: Vector Search -type: feature -issues: [] diff --git a/docs/changelog/99134.yaml b/docs/changelog/99134.yaml deleted file mode 100644 index 10156b9b30066..0000000000000 --- a/docs/changelog/99134.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99134 -summary: Add ability to create a data stream failure store -area: Data streams -type: feature -issues: [] diff --git a/docs/changelog/99445.yaml b/docs/changelog/99445.yaml deleted file mode 100644 index deea5fbf2423c..0000000000000 --- a/docs/changelog/99445.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99445 -summary: Make cosine similarity faster by storing magnitude and normalizing vectors -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/99702.yaml b/docs/changelog/99702.yaml deleted file mode 100644 index 657ff34e045a8..0000000000000 --- a/docs/changelog/99702.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99702 -summary: Making classname optional in Transport protocol -area: Infra/Plugins -type: bug -issues: - - 98584 diff --git a/docs/changelog/99752.yaml b/docs/changelog/99752.yaml deleted file mode 100644 index c137a563bea39..0000000000000 --- a/docs/changelog/99752.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99752 -summary: Pass shard's primary term to Engine#addSegmentGenerationListener -area: Store -type: enhancement -issues: [] diff --git a/docs/changelog/99852.yaml b/docs/changelog/99852.yaml deleted file mode 100644 index 3a26f17737ae8..0000000000000 --- a/docs/changelog/99852.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99852 -summary: Record more detailed HTTP stats -area: Network -type: enhancement -issues: [] diff --git a/docs/changelog/99963.yaml b/docs/changelog/99963.yaml deleted file mode 100644 index 4f03dceeb22aa..0000000000000 --- a/docs/changelog/99963.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99963 -summary: Aggs error 
codes part 1 -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/99975.yaml b/docs/changelog/99975.yaml deleted file mode 100644 index a34746c27ec99..0000000000000 --- a/docs/changelog/99975.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99975 -summary: Rename component templates and pipelines according to the new naming conventions -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/99984.yaml b/docs/changelog/99984.yaml deleted file mode 100644 index 254845591941d..0000000000000 --- a/docs/changelog/99984.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99984 -summary: Switch fleet's built-in ILM policies to use .actions.rollover.max_primary_shard_size -area: ILM+SLM -type: enhancement -issues: - - 99983 diff --git a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc index 99488afeb98ee..c910b0431a6ea 100644 --- a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc @@ -75,6 +75,9 @@ task. In this situation, the following task must be resumed manually with the `index`:: (string) The name of the follower index. +`total_global_checkpoint_lag`:: +(long) Indication of how much the follower is lagging the leader. This is the sum of the difference between the `leader_global_checkpoint` and the `follower_global_checkpoint` for all shards. + //Begin shards `shards`:: (array) An array of shard-level following task statistics. 
@@ -219,6 +222,7 @@ The API returns the following results: "indices" : [ { "index" : "follower_index", + "total_global_checkpoint_lag" : 256, "shards" : [ { "remote_cluster" : "remote_cluster", @@ -255,6 +259,7 @@ The API returns the following results: ] } -------------------------------------------------- +// TESTRESPONSE[s/"total_global_checkpoint_lag" : 256/"total_global_checkpoint_lag" : 0/] // TESTRESPONSE[s/"leader_global_checkpoint" : 1024/"leader_global_checkpoint" : $body.indices.0.shards.0.leader_global_checkpoint/] // TESTRESPONSE[s/"leader_max_seq_no" : 1536/"leader_max_seq_no" : $body.indices.0.shards.0.leader_max_seq_no/] // TESTRESPONSE[s/"follower_global_checkpoint" : 768/"follower_global_checkpoint" : $body.indices.0.shards.0.follower_global_checkpoint/] diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index 6d43e089c2471..02f5cf886049d 100644 --- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -112,6 +112,7 @@ The API returns the following results: "indices" : [ { "index" : "follower_index", + "total_global_checkpoint_lag" : 256, "shards" : [ { "remote_cluster" : "remote_cluster", @@ -149,6 +150,7 @@ The API returns the following results: } } -------------------------------------------------- +// TESTRESPONSE[s/"total_global_checkpoint_lag" : 256/"total_global_checkpoint_lag" : 0/] // TESTRESPONSE[s/"number_of_failed_follow_indices" : 0/"number_of_failed_follow_indices" : $body.auto_follow_stats.number_of_failed_follow_indices/] // TESTRESPONSE[s/"number_of_failed_remote_cluster_state_requests" : 0/"number_of_failed_remote_cluster_state_requests" : $body.auto_follow_stats.number_of_failed_remote_cluster_state_requests/] // TESTRESPONSE[s/"number_of_successful_follow_indices" : 1/"number_of_successful_follow_indices" : $body.auto_follow_stats.number_of_successful_follow_indices/] diff --git 
a/docs/reference/cluster/delete-desired-nodes.asciidoc b/docs/reference/cluster/delete-desired-nodes.asciidoc index 78baa63f694f8..a58d19e2dfa3f 100644 --- a/docs/reference/cluster/delete-desired-nodes.asciidoc +++ b/docs/reference/cluster/delete-desired-nodes.asciidoc @@ -27,8 +27,7 @@ PUT /_internal/desired_nodes/history/1 }, "processors" : 8.0, "memory" : "58gb", - "storage" : "2tb", - "node_version" : "{version}" + "storage" : "2tb" } ] } diff --git a/docs/reference/cluster/get-desired-nodes.asciidoc b/docs/reference/cluster/get-desired-nodes.asciidoc index 0485772c8bf69..de27bd657b3ff 100644 --- a/docs/reference/cluster/get-desired-nodes.asciidoc +++ b/docs/reference/cluster/get-desired-nodes.asciidoc @@ -27,8 +27,7 @@ PUT /_internal/desired_nodes/my_history/1 }, "processors" : 8.0, "memory" : "59gb", - "storage" : "2tb", - "node_version" : "{version}" + "storage" : "2tb" } ] } @@ -79,8 +78,7 @@ The API returns the following result: "settings": , "processors": , "memory": "", - "storage": "", - "node_version": "" + "storage": "" } ] } diff --git a/docs/reference/cluster/update-desired-nodes.asciidoc b/docs/reference/cluster/update-desired-nodes.asciidoc index 986c8e47d9587..b7bbb8b3b7f4f 100644 --- a/docs/reference/cluster/update-desired-nodes.asciidoc +++ b/docs/reference/cluster/update-desired-nodes.asciidoc @@ -26,8 +26,7 @@ PUT /_internal/desired_nodes// }, "processors" : 8.0, "memory" : "58gb", - "storage" : "2tb", - "node_version" : "{version}" + "storage" : "2tb" } ] } @@ -86,8 +85,7 @@ PUT /_internal/desired_nodes/Ywkh3INLQcuPT49f6kcppA/100 }, "processors" : 8.0, "memory" : "58gb", - "storage" : "2tb", - "node_version" : "{version}" + "storage" : "2tb" } ] } @@ -125,8 +123,7 @@ PUT /_internal/desired_nodes/Ywkh3INLQcuPT49f6kcppA/101 }, "processors_range" : {"min": 8.0, "max": 10.0}, "memory" : "58gb", - "storage" : "2tb", - "node_version" : "{version}" + "storage" : "2tb" } ] } diff --git 
a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc index 6123b7eb5511d..bd886bf923af8 100644 --- a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc @@ -4,6 +4,8 @@ Cancel connector sync job ++++ +preview::[] + Cancels a connector sync job. [[cancel-connector-sync-job-api-request]] diff --git a/docs/reference/connector/apis/check-in-connector-api.asciidoc b/docs/reference/connector/apis/check-in-connector-api.asciidoc index c0c021f1304dc..9f88c595e3a67 100644 --- a/docs/reference/connector/apis/check-in-connector-api.asciidoc +++ b/docs/reference/connector/apis/check-in-connector-api.asciidoc @@ -1,12 +1,11 @@ [[check-in-connector-api]] === Check in connector API - -preview::[] - ++++ Check in a connector ++++ +preview::[] + Updates the `last_seen` field of a connector with current timestamp. [[check-in-connector-api-request]] diff --git a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc index 04c8057e2c115..d3cc34bf025ed 100644 --- a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc @@ -4,6 +4,8 @@ Check in connector sync job ++++ +preview::[] + Checks in a connector sync job (updates `last_seen` to the current time). 
[[check-in-connector-sync-job-api-request]] diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc index e127dc07446b5..eabb531551fe5 100644 --- a/docs/reference/connector/apis/connector-apis.asciidoc +++ b/docs/reference/connector/apis/connector-apis.asciidoc @@ -3,17 +3,16 @@ preview::[] -++++ -Connector APIs -++++ - ---- - -The connector and sync jobs API provides a convenient way to create and manage Elastic connectors and sync jobs in an internal index. +The connector and sync jobs API provides a convenient way to create and manage Elastic {enterprise-search-ref}/connectors.html[connectors^] and sync jobs in an internal index. This API provides an alternative to relying solely on {kib} UI for connector and sync job management. The API comes with a set of validations and assertions to ensure that the state representation in the internal index remains valid. +[TIP] +==== +We also have a command-line interface for Elastic connectors. Learn more in the https://github.com/elastic/connectors/blob/main/docs/CLI.md[elastic/connectors] repository. +==== + [discrete] [[elastic-connector-apis]] === Connector APIs diff --git a/docs/reference/connector/apis/create-connector-api.asciidoc b/docs/reference/connector/apis/create-connector-api.asciidoc index b62ca4ad070a4..2c1c4c9ba7bc4 100644 --- a/docs/reference/connector/apis/create-connector-api.asciidoc +++ b/docs/reference/connector/apis/create-connector-api.asciidoc @@ -4,6 +4,8 @@ Create connector ++++ +preview::[] + Creates a connector. 
diff --git a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc index e8c2c364797c4..b036485285256 100644 --- a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc @@ -4,6 +4,9 @@ Create connector sync job ++++ +preview::[] + + Creates a connector sync job. [source, console] diff --git a/docs/reference/connector/apis/delete-connector-api.asciidoc b/docs/reference/connector/apis/delete-connector-api.asciidoc index 6d3a120df785a..c7e9dcd94d2ad 100644 --- a/docs/reference/connector/apis/delete-connector-api.asciidoc +++ b/docs/reference/connector/apis/delete-connector-api.asciidoc @@ -1,12 +1,11 @@ [[delete-connector-api]] === Delete connector API - -preview::[] - ++++ Delete connector ++++ +preview::[] + Removes a connector and its associated data. This is a destructive action that is not recoverable. diff --git a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc index 8641794576bf1..32df172df758a 100644 --- a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc @@ -1,12 +1,11 @@ [[delete-connector-sync-job-api]] === Delete connector sync job API - -preview::[] - ++++ Delete connector sync job ++++ +preview::[] + Removes a connector sync job and its associated data. This is a destructive action that is not recoverable. 
diff --git a/docs/reference/connector/apis/get-connector-api.asciidoc b/docs/reference/connector/apis/get-connector-api.asciidoc index ab4a2758ce4f1..693a9fd767806 100644 --- a/docs/reference/connector/apis/get-connector-api.asciidoc +++ b/docs/reference/connector/apis/get-connector-api.asciidoc @@ -1,10 +1,11 @@ [[get-connector-api]] === Get connector API -preview::[] ++++ Get connector ++++ +preview::[] + Retrieves the details about a connector. [[get-connector-api-request]] diff --git a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc index b33aec8c55e60..bfa82ea0d345c 100644 --- a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc @@ -1,10 +1,11 @@ [[get-connector-sync-job-api]] === Get connector sync job API -preview::[] ++++ Get connector sync job ++++ +preview::[] + Retrieves the details about a connector sync job. [[get-connector-sync-job-api-request]] diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc index 8b88f318f5304..a8851885b5051 100644 --- a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc +++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc @@ -1,13 +1,12 @@ [role="xpack"] [[list-connector-sync-jobs-api]] === List connector sync jobs API - -preview::[] - ++++ List connector sync jobs ++++ +preview::[] + Returns information about all stored connector sync jobs ordered by their creation date in ascending order. 
diff --git a/docs/reference/connector/apis/list-connectors-api.asciidoc b/docs/reference/connector/apis/list-connectors-api.asciidoc index 57d3cc47aeb7a..9b3fc50690243 100644 --- a/docs/reference/connector/apis/list-connectors-api.asciidoc +++ b/docs/reference/connector/apis/list-connectors-api.asciidoc @@ -1,13 +1,12 @@ [role="xpack"] [[list-connector-api]] === List connectors API - -preview::[] - ++++ List connectors ++++ +preview::[] + Returns information about all stored connectors. diff --git a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc index 935fcccc77fcf..a9dbf5ceb1eb2 100644 --- a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc +++ b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc @@ -4,6 +4,8 @@ Set connector sync job error ++++ +preview::[] + Sets a connector sync job error. [[set-connector-sync-job-error-api-request]] diff --git a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc index 0513155312bb4..a417bcf8b9e9f 100644 --- a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc +++ b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc @@ -4,6 +4,8 @@ Set connector sync job stats ++++ +preview::[] + Sets connector sync job stats. 
[[set-connector-sync-job-stats-api-request]] diff --git a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc index 6d6591a6f00bc..57484c14d0f90 100644 --- a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-configuration-api]] === Update connector configuration API - -preview::[] - ++++ Update connector configuration ++++ +preview::[] + Updates the `configuration` of a connector. diff --git a/docs/reference/connector/apis/update-connector-error-api.asciidoc b/docs/reference/connector/apis/update-connector-error-api.asciidoc index 19bc15f0dc60a..dbed25f1bf8d5 100644 --- a/docs/reference/connector/apis/update-connector-error-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-error-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-error-api]] === Update connector error API - -preview::[] - ++++ Update connector error ++++ +preview::[] + Updates the `error` field of a connector. [[update-connector-error-api-request]] diff --git a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc index d4c7bb16a3304..3e81f0fda2ce7 100644 --- a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc @@ -1,12 +1,12 @@ [[update-connector-filtering-api]] === Update connector filtering API - -preview::[] - ++++ Update connector filtering ++++ +preview::[] + + Updates the `filtering` configuration of a connector. Learn more about filtering in the {enterprise-search-ref}/sync-rules.html[sync rules] documentation. 
[[update-connector-filtering-api-request]] diff --git a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc index e9fffd22b21cd..6f41925e3676f 100644 --- a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-last-sync-api]] === Update connector last sync stats API - -preview::[] - ++++ Update connector last sync stats ++++ +preview::[] + Updates the fields related to the last sync of a connector. This action is used for analytics and monitoring. diff --git a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc index d45fb545e168b..c54dba8dd72b5 100644 --- a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc @@ -1,12 +1,12 @@ [[update-connector-name-description-api]] === Update connector name and description API - -preview::[] - ++++ Update connector name and description ++++ +preview::[] + + Updates the `name` and `description` fields of a connector. [[update-connector-name-description-api-request]] diff --git a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc index 6938506703da8..63872bf96aa55 100644 --- a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-pipeline-api]] === Update connector pipeline API - -preview::[] - ++++ Update connector pipeline ++++ +preview::[] + Updates the `pipeline` configuration of a connector. When you create a new connector, the configuration of an <> is populated with default settings. 
diff --git a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc index c47e6d4c0367b..7a2f33bcaeaa8 100644 --- a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-scheduling-api]] === Update connector scheduling API - -preview::[] - ++++ Update connector scheduling ++++ +preview::[] + Updates the `scheduling` configuration of a connector. [[update-connector-scheduling-api-request]] diff --git a/docs/reference/data-streams/downsampling.asciidoc b/docs/reference/data-streams/downsampling.asciidoc index 5e31a90bfb959..cac73787fc018 100644 --- a/docs/reference/data-streams/downsampling.asciidoc +++ b/docs/reference/data-streams/downsampling.asciidoc @@ -135,7 +135,29 @@ downsampled. * For <>, only `fixed_intervals` (and not calendar-aware intervals) are supported. -* Only Coordinated Universal Time (UTC) date-times are supported. +* Timezone support comes with caveats: + +** Date histograms at intervals that are multiples of an hour are based on +values generated at UTC. This works well for timezones that are on the hour, e.g. ++5:00 or -3:00, but requires offsetting the reported time buckets, e.g. +`2020-01-01T10:30:00.000` instead of `2020-03-07T10:00:00.000` for +timezone +5:30 (India), if downsampling aggregates values per hour. In this case, +the results include the field `downsampled_results_offset: true`, to indicate that +the time buckets are shifted. This can be avoided if a downsampling interval of 15 +minutes is used, as it allows properly calculating hourly values for the shifted +buckets. + +** Date histograms at intervals that are multiples of a day are similarly +affected, in case downsampling aggregates values per day. 
In this case, the +beginning of each day is always calculated at UTC when generating the downsampled +values, so the time buckets need to be shifted, e.g. reported as +`2020-03-07T19:00:00.000` instead of `2020-03-07T00:00:00.000` for timezone `America/New_York`. +The field `downsampled_results_offset: true` is added in this case too. + +** Daylight savings and similar peculiarities around timezones affect +reported results, as <> +for date histogram aggregation. Besides, downsampling at daily interval +hinders tracking any information related to daylight savings changes. [discrete] [[downsampling-restrictions]] diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 0a78a923523cc..0d15eb313a61f 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -93,8 +93,9 @@ parameters: Timeout duration to wait for the request to finish. Defaults to a 1 second, meaning the request waits for 1 second for the query results. -If this parameter is specified and the request completes during this period, -complete results are returned. +If the query completes during this period then results will be +returned. Otherwise, a query `id` is returned that can later be used to +retrieve the results. If the request does not complete during this period, a query <> is returned. diff --git a/docs/reference/esql/esql-kibana.asciidoc b/docs/reference/esql/esql-kibana.asciidoc index 9534d004414f2..07502add5a620 100644 --- a/docs/reference/esql/esql-kibana.asciidoc +++ b/docs/reference/esql/esql-kibana.asciidoc @@ -261,7 +261,10 @@ of rows that are retrieved by the query and displayed in Discover. Queries and aggregations run on the full data set. * Discover shows no more than 50 columns. If a query returns more than 50 columns, Discover only shows the first 50.
-* Querying many many indices at once without any filters can cause an error in +* CSV export from Discover shows no more than 10,000 rows. This limit only applies to the number +of rows that are retrieved by the query and displayed in Discover. Queries and +aggregations run on the full data set. +* Querying many indices at once without any filters can cause an error in kibana which looks like `[esql] > Unexpected error from Elasticsearch: The content length (536885793) is bigger than the maximum allowed string (536870888)`. The response from {esql} is too long. Use <> or diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc index 00f5b056c7ebe..f3b3dd824fb22 100644 --- a/docs/reference/esql/esql-limitations.asciidoc +++ b/docs/reference/esql/esql-limitations.asciidoc @@ -36,7 +36,9 @@ include::processing-commands/limit.asciidoc[tag=limitation] * `version` * Spatial types ** `geo_point` +** `geo_shape` ** `point` +** `shape` [discrete] ==== Unsupported types @@ -47,9 +49,6 @@ include::processing-commands/limit.asciidoc[tag=limitation] ** `counter` ** `position` ** `aggregate_metric_double` -* Spatial types -** `geo_shape` -** `shape` * Date/time ** `date_nanos` ** `date_range` diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index bbfa41538528a..e1e27be12a36f 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -43,6 +43,12 @@ or alias you search. (Optional, string) Separator for CSV results. Defaults to `,`. The API only supports this parameter for CSV responses. +`drop_null_columns`:: +(Optional, boolean) Should columns that are entirely `null` be removed from +the `columns` and `values` portion of the results? Defaults to `false`. If +`true` the response will include an extra section under the name +`all_columns` which has the name of all columns.
+ `format`:: (Optional, string) Format for the response. For valid values, refer to <>. @@ -75,17 +81,12 @@ responses. See <>. `columns`:: (array of objects) -Column headings for the search results. Each object is a column. -+ -.Properties of `columns` objects -[%collapsible%open] -===== -`name`:: -(string) Name of the column. - -`type`:: -(string) Data type for the column. -===== +Column `name` and `type` for each column returned in `values`. Each object is a single column. + +`all_columns`:: +(array of objects) +Column `name` and `type` for each queried column. Each object is a single column. This is only +returned if `drop_null_columns` is sent with the request. `rows`:: (array of arrays) diff --git a/docs/reference/esql/esql-security-solution.asciidoc b/docs/reference/esql/esql-security-solution.asciidoc index 45e8e44e44bdd..24766a5ef93f1 100644 --- a/docs/reference/esql/esql-security-solution.asciidoc +++ b/docs/reference/esql/esql-security-solution.asciidoc @@ -14,7 +14,7 @@ questions about the {esql} query language. === Use {esql} to investigate events in Timeline You can use {esql} in Timeline to filter, transform, and analyze event data -stored in {es}. To start using {esql}, open the the **{esql}** tab. To learn +stored in {es}. To start using {esql}, open the **{esql}** tab. To learn more, refer to {security-guide}/timelines-ui.html#esql-in-timeline[Investigate events in Timeline]. @@ -38,4 +38,4 @@ the {esql} query language. To learn more, refer to NOTE: For AI Assistant to answer questions about {esql} and write {esql} queries, you need to {security-guide}/security-assistant.html#set-up-ai-assistant[enable knowledge -base]. \ No newline at end of file +base]. 
diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index bd501ea49f158..91293728fd45c 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -16,6 +16,7 @@ The <> function supports these aggregate functions: * <> * <> * <> +* <> * <> // end::agg_list[] @@ -27,4 +28,5 @@ include::median.asciidoc[] include::median-absolute-deviation.asciidoc[] include::min.asciidoc[] include::percentile.asciidoc[] +include::st_centroid.asciidoc[] include::sum.asciidoc[] diff --git a/docs/reference/esql/functions/asin.asciidoc b/docs/reference/esql/functions/asin.asciidoc index 222f6879785ef..a326852e9b016 100644 --- a/docs/reference/esql/functions/asin.asciidoc +++ b/docs/reference/esql/functions/asin.asciidoc @@ -14,9 +14,8 @@ Numeric expression. If `null`, the function returns `null`. *Description* -Returns the -https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arcsine] -of the input numeric expression as an angle, expressed in radians. +Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input +numeric expression as an angle, expressed in radians. *Supported types* diff --git a/docs/reference/esql/functions/atan.asciidoc b/docs/reference/esql/functions/atan.asciidoc index bdbbd07cbba60..604fc4d0bbecc 100644 --- a/docs/reference/esql/functions/atan.asciidoc +++ b/docs/reference/esql/functions/atan.asciidoc @@ -14,9 +14,8 @@ Numeric expression. If `null`, the function returns `null`. *Description* -Returns the -https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arctangent] of the -input numeric expression as an angle, expressed in radians. +Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input +numeric expression as an angle, expressed in radians. 
*Supported types* diff --git a/docs/reference/esql/functions/atan2.asciidoc b/docs/reference/esql/functions/atan2.asciidoc index 3ecc0ff86fe26..1920b4b7ac1a0 100644 --- a/docs/reference/esql/functions/atan2.asciidoc +++ b/docs/reference/esql/functions/atan2.asciidoc @@ -17,9 +17,8 @@ Numeric expression. If `null`, the function returns `null`. *Description* -The https://en.wikipedia.org/wiki/Atan2[angle] between the positive x-axis and -the ray from the origin to the point (x , y) in the Cartesian plane, expressed -in radians. +The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the +origin to the point (x , y) in the Cartesian plane, expressed in radians. *Supported types* diff --git a/docs/reference/esql/functions/auto_bucket.asciidoc b/docs/reference/esql/functions/auto_bucket.asciidoc index 2301939cf5050..aedfdaa7c0e12 100644 --- a/docs/reference/esql/functions/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/auto_bucket.asciidoc @@ -6,13 +6,13 @@ [source,esql] ---- -AUTO_BUCKET(field, buckets, from, to) +AUTO_BUCKET(expression, buckets, from, to) ---- *Parameters* `field`:: -Numeric or date column from which to derive buckets. +Numeric or date expression from which to derive buckets. `buckets`:: Target number of buckets. diff --git a/docs/reference/esql/functions/avg.asciidoc b/docs/reference/esql/functions/avg.asciidoc index 6345be99c5d6d..9a6f5a82d1959 100644 --- a/docs/reference/esql/functions/avg.asciidoc +++ b/docs/reference/esql/functions/avg.asciidoc @@ -6,15 +6,15 @@ [source,esql] ---- -AVG(column) +AVG(expression) ---- -`column`:: -Numeric column. If `null`, the function returns `null`. +`expression`:: +Numeric expression. If `null`, the function returns `null`. *Description* -The average of a numeric field. +The average of a numeric expression. 
*Supported types* diff --git a/docs/reference/esql/functions/cos.asciidoc b/docs/reference/esql/functions/cos.asciidoc index f7874d46c558a..a5a0251bbd70a 100644 --- a/docs/reference/esql/functions/cos.asciidoc +++ b/docs/reference/esql/functions/cos.asciidoc @@ -14,8 +14,8 @@ Numeric expression. If `null`, the function returns `null`. *Description* -Returns the https://en.wikipedia.org/wiki/Sine_and_cosine[cosine] of `n`. Input -expected in radians. +Returns the {wikipedia}/Sine_and_cosine[cosine] of `n`. Input expected in +radians. *Supported types* diff --git a/docs/reference/esql/functions/cosh.asciidoc b/docs/reference/esql/functions/cosh.asciidoc index ae813e91ec9bb..5883bc4b9d0c4 100644 --- a/docs/reference/esql/functions/cosh.asciidoc +++ b/docs/reference/esql/functions/cosh.asciidoc @@ -12,14 +12,13 @@ image::esql/functions/signature/cosh.svg[Embedded,opts=inline] `n`:: Numeric expression. If `null`, the function returns `null`. -*Supported types* +*Description* -include::types/cosh.asciidoc[] +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine]. -*Description* +*Supported types* -Returns the https://en.wikipedia.org/wiki/Hyperbolic_functions[hyperbolic -cosine]. +include::types/cosh.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/count-distinct.asciidoc b/docs/reference/esql/functions/count-distinct.asciidoc index 14fa6eff39d4c..04a200935cd48 100644 --- a/docs/reference/esql/functions/count-distinct.asciidoc +++ b/docs/reference/esql/functions/count-distinct.asciidoc @@ -6,7 +6,7 @@ [source,esql] ---- -COUNT_DISTINCT(column[, precision]) +COUNT_DISTINCT(column[, precision_threshold]) ---- *Parameters* @@ -14,8 +14,10 @@ COUNT_DISTINCT(column[, precision]) `column`:: Column for which to count the number of distinct values. -`precision`:: -Precision. Refer to <>. +`precision_threshold`:: +Precision threshold. Refer to <>. The +maximum supported value is 40000. 
Thresholds above this number will have the +same effect as a threshold of 40000. The default value is 3000. *Description* @@ -37,8 +39,12 @@ properties: include::../../aggregations/metrics/cardinality-aggregation.asciidoc[tag=explanation] -The `COUNT_DISTINCT` function takes an optional second parameter to configure the -precision. +The `COUNT_DISTINCT` function takes an optional second parameter to configure +the precision threshold. The precision_threshold option allows to trade memory +for accuracy, and defines a unique count below which counts are expected to be +close to accurate. Above this value, counts might become a bit more fuzzy. The +maximum supported value is 40000, thresholds above this number will have the +same effect as a threshold of 40000. The default value is `3000`. *Supported types* @@ -55,7 +61,7 @@ include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct] include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-result] |=== -With the optional second parameter to configure the precision: +With the optional second parameter to configure the precision threshold: [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index 8ff7b1e974eeb..f90bc007f744e 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -9,6 +9,7 @@ // tag::date_list[] * <> +* <> * <> * <> * <> @@ -17,6 +18,7 @@ // end::date_list[] include::auto_bucket.asciidoc[] +include::date_diff.asciidoc[] include::date_extract.asciidoc[] include::date_format.asciidoc[] include::date_parse.asciidoc[] diff --git a/docs/reference/esql/functions/date_diff.asciidoc b/docs/reference/esql/functions/date_diff.asciidoc index 6127290466b10..fa51e6f906110 100644 --- a/docs/reference/esql/functions/date_diff.asciidoc +++ b/docs/reference/esql/functions/date_diff.asciidoc @@ -1,10 
+1,30 @@ [discrete] [[esql-date_diff]] === `DATE_DIFF` -Subtract the second argument from the third argument and return their difference in multiples of the unit specified in the first argument. -If the second argument (start) is greater than the third argument (end), then negative values are returned. -[cols="^,^"] +*Syntax* + +[.text-center] +image::esql/functions/signature/date_diff.svg[Embedded,opts=inline] + +*Parameters* + +`unit`:: +Time difference unit. + +`startTimestamp`:: +Start timestamp. + +`endTimestamp`:: +End timestamp. + +*Description* + +Subtracts the `startTimestamp` from the `endTimestamp` and returns the +difference in multiples of `unit`. If `startTimestamp` is later than the +`endTimestamp`, negative values are returned. + +[cols="^,^",role="styled"] |=== 2+h|Datetime difference units @@ -26,12 +46,18 @@ s|abbreviations | nanosecond | nanoseconds, ns |=== +*Supported types* + +include::types/date_diff.asciidoc[] + +*Example* + [source.merge.styled,esql] ---- -include::{esql-specs}/docs.csv-spec[tag=dateDiff] +include::{esql-specs}/date.csv-spec[tag=docsDateDiff] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs.csv-spec[tag=dateDiff-result] +include::{esql-specs}/date.csv-spec[tag=docsDateDiff-result] |=== diff --git a/docs/reference/esql/functions/e.asciidoc b/docs/reference/esql/functions/e.asciidoc index 56bf97fd01740..ac082c1a68a07 100644 --- a/docs/reference/esql/functions/e.asciidoc +++ b/docs/reference/esql/functions/e.asciidoc @@ -1,10 +1,17 @@ [discrete] [[esql-e]] === `E` + +*Syntax* + [.text-center] image::esql/functions/signature/e.svg[Embedded,opts=inline] -{wikipedia}/E_(mathematical_constant)[Euler's number]. +*Description* + +Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. 
+ +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/ends_with.asciidoc b/docs/reference/esql/functions/ends_with.asciidoc index fd2d99931163a..49477996ada19 100644 --- a/docs/reference/esql/functions/ends_with.asciidoc +++ b/docs/reference/esql/functions/ends_with.asciidoc @@ -1,11 +1,30 @@ [discrete] [[esql-ends_with]] === `ENDS_WITH` + +*Syntax* + [.text-center] image::esql/functions/signature/ends_with.svg[Embedded,opts=inline] +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +`suffix`:: +String expression. If `null`, the function returns `null`. + +*Description* + Returns a boolean that indicates whether a keyword string ends with another -string: +string. + +*Supported types* + +include::types/ends_with.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,7 +34,3 @@ include::{esql-specs}/string.csv-spec[tag=endsWith] |=== include::{esql-specs}/string.csv-spec[tag=endsWith-result] |=== - -Supported types: - -include::types/ends_with.asciidoc[] diff --git a/docs/reference/esql/functions/floor.asciidoc b/docs/reference/esql/functions/floor.asciidoc index 109033bb18827..0730a87e595fd 100644 --- a/docs/reference/esql/functions/floor.asciidoc +++ b/docs/reference/esql/functions/floor.asciidoc @@ -1,10 +1,30 @@ [discrete] [[esql-floor]] === `FLOOR` + +*Syntax* + [.text-center] image::esql/functions/signature/floor.svg[Embedded,opts=inline] -Round a number down to the nearest integer. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +Rounds a number down to the nearest integer. + +NOTE: This is a noop for `long` (including unsigned) and `integer`. + For `double` this picks the closest `double` value to the integer + similar to {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. 
+ +*Supported types* + +include::types/floor.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,11 +34,3 @@ include::{esql-specs}/math.csv-spec[tag=floor] |=== include::{esql-specs}/math.csv-spec[tag=floor-result] |=== - -NOTE: This is a noop for `long` (including unsigned) and `integer`. - For `double` this picks the the closest `double` value to the integer ala - {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. - -Supported types: - -include::types/floor.asciidoc[] diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc index 24dd08de2819c..b9fc114d39ec6 100644 --- a/docs/reference/esql/functions/greatest.asciidoc +++ b/docs/reference/esql/functions/greatest.asciidoc @@ -1,11 +1,34 @@ [discrete] [[esql-greatest]] === `GREATEST` + +*Syntax* + [.text-center] image::esql/functions/signature/greatest.svg[Embedded,opts=inline] -Returns the maximum value from many columns. This is similar to <> -except it's intended to run on multiple columns at once. +*Parameters* + +`first`:: +First of the columns to evaluate. + +`rest`:: +The rest of the columns to evaluate. + +*Description* + +Returns the maximum value from multiple columns. This is similar to <> +except it is intended to run on multiple columns at once. + +NOTE: When run on `keyword` or `text` fields, this returns the last string + in alphabetical order. When run on `boolean` columns this will return + `true` if any values are `true`. + +*Supported types* + +include::types/greatest.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,11 +38,3 @@ include::{esql-specs}/math.csv-spec[tag=greatest] |=== include::{esql-specs}/math.csv-spec[tag=greatest-result] |=== - -NOTE: When run on `keyword` or `text` fields, this'll return the last string - in alphabetical order. When run on `boolean` columns this will return - `true` if any values are `true`. 
- -Supported types: - -include::types/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc index 62d7406199cd4..41f58b0d415c2 100644 --- a/docs/reference/esql/functions/least.asciidoc +++ b/docs/reference/esql/functions/least.asciidoc @@ -1,11 +1,34 @@ [discrete] [[esql-least]] === `LEAST` + +*Syntax* + [.text-center] image::esql/functions/signature/least.svg[Embedded,opts=inline] -Returns the minimum value from many columns. This is similar to <> -except it's intended to run on multiple columns at once. +*Parameters* + +`first`:: +First of the columns to evaluate. + +`rest`:: +The rest of the columns to evaluate. + +*Description* + +Returns the minimum value from multiple columns. This is similar to +<> except it is intended to run on multiple columns at once. + +NOTE: When run on `keyword` or `text` fields, this returns the first string + in alphabetical order. When run on `boolean` columns this will return + `false` if any values are `false`. + +*Supported types* + +include::types/least.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,11 +38,3 @@ include::{esql-specs}/math.csv-spec[tag=least] |=== include::{esql-specs}/math.csv-spec[tag=least-result] |=== - -NOTE: When run on `keyword` or `text` fields, this'll return the first string - in alphabetical order. When run on `boolean` columns this will return - `false` if any values are `false`. - -Supported types: - -include::types/least.asciidoc[] diff --git a/docs/reference/esql/functions/left.asciidoc b/docs/reference/esql/functions/left.asciidoc index 67e739377aa46..5d666656b1ee4 100644 --- a/docs/reference/esql/functions/left.asciidoc +++ b/docs/reference/esql/functions/left.asciidoc @@ -1,10 +1,30 @@ [discrete] [[esql-left]] === `LEFT` + +*Syntax* + [.text-center] image::esql/functions/signature/left.svg[Embedded,opts=inline] -Return the substring that extracts 'length' chars from the 'string' starting from the left. 
+*Parameters* + +`str`:: +The string from which to return a substring. + +`length`:: +The number of characters to return. + +*Description* + +Returns the substring that extracts 'length' chars from 'str' starting +from the left. + +*Supported types* + +include::types/left.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,7 +34,3 @@ include::{esql-specs}/string.csv-spec[tag=left] |=== include::{esql-specs}/string.csv-spec[tag=left-result] |=== - -Supported types: - -include::types/left.asciidoc[] diff --git a/docs/reference/esql/functions/length.asciidoc b/docs/reference/esql/functions/length.asciidoc index 12e1bed3d0a66..b89b75a702460 100644 --- a/docs/reference/esql/functions/length.asciidoc +++ b/docs/reference/esql/functions/length.asciidoc @@ -1,11 +1,30 @@ [discrete] [[esql-length]] === `LENGTH` -Returns the character length of a string. + +*Syntax* [source,esql] ---- -FROM employees -| KEEP first_name, last_name, height -| EVAL fn_length = LENGTH(first_name) +LENGTH(str) +---- + +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +*Description* + +Returns the character length of a string. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/eval.csv-spec[tag=length] ---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/eval.csv-spec[tag=length-result] +|=== diff --git a/docs/reference/esql/functions/log10.asciidoc b/docs/reference/esql/functions/log10.asciidoc index 219519ca2a0d7..d806da3173818 100644 --- a/docs/reference/esql/functions/log10.asciidoc +++ b/docs/reference/esql/functions/log10.asciidoc @@ -1,13 +1,29 @@ [discrete] [[esql-log10]] === `LOG10` + +*Syntax* + [.text-center] image::esql/functions/signature/log10.svg[Embedded,opts=inline] -Returns the log base 10. The input can be any numeric value, the return value -is always a double. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`.
+ +*Description* -Logs of negative numbers are NaN. Logs of infinites are infinite, as is the log of 0. +Returns the logarithm to base 10. The input can be any numeric value, the return +value is always a double. + +Logs of 0, negative numbers, and infinites return `null` as well as a warning. + +*Supported types* + +include::types/log10.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -17,7 +31,3 @@ include::{esql-specs}/math.csv-spec[tag=log10] |=== include::{esql-specs}/math.csv-spec[tag=log10-result] |=== - -Supported types: - -include::types/log10.asciidoc[] diff --git a/docs/reference/esql/functions/ltrim.asciidoc b/docs/reference/esql/functions/ltrim.asciidoc index e5230e4edd41a..4b7b619d06afc 100644 --- a/docs/reference/esql/functions/ltrim.asciidoc +++ b/docs/reference/esql/functions/ltrim.asciidoc @@ -1,11 +1,27 @@ [discrete] [[esql-ltrim]] === `LTRIM` + +*Syntax* + [.text-center] image::esql/functions/signature/ltrim.svg[Embedded,opts=inline] +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +*Description* + Removes leading whitespaces from strings. +*Supported types* + +include::types/ltrim.asciidoc[] + +*Example* + [source.merge.styled,esql] ---- include::{esql-specs}/string.csv-spec[tag=ltrim] @@ -13,8 +29,4 @@ include::{esql-specs}/string.csv-spec[tag=ltrim] [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/string.csv-spec[tag=ltrim-result] -|=== - -Supported types: - -include::types/rtrim.asciidoc[] +|=== diff --git a/docs/reference/esql/functions/max.asciidoc b/docs/reference/esql/functions/max.asciidoc index 53997e501b37f..4bc62de341d9d 100644 --- a/docs/reference/esql/functions/max.asciidoc +++ b/docs/reference/esql/functions/max.asciidoc @@ -1,7 +1,24 @@ [discrete] [[esql-agg-max]] === `MAX` -The maximum value of a numeric field.
+ +*Syntax* + +[source,esql] +---- +MAX(column) +---- + +*Parameters* + +`column`:: +Column from which to return the maximum value. + +*Description* + +Returns the maximum value of a numeric column. + +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/median-absolute-deviation.asciidoc b/docs/reference/esql/functions/median-absolute-deviation.asciidoc index fe0923da1fb88..301d344489643 100644 --- a/docs/reference/esql/functions/median-absolute-deviation.asciidoc +++ b/docs/reference/esql/functions/median-absolute-deviation.asciidoc @@ -1,23 +1,29 @@ [discrete] [[esql-agg-median-absolute-deviation]] === `MEDIAN_ABSOLUTE_DEVIATION` -The median absolute deviation, a measure of variability. It is a robust -statistic, meaning that it is useful for describing data that may have outliers, -or may not be normally distributed. For such data it can be more descriptive than -standard deviation. -It is calculated as the median of each data point’s deviation from the median of -the entire sample. That is, for a random variable `X`, the median absolute deviation -is `median(|median(X) - Xi|)`. +*Syntax* -[source.merge.styled,esql] +[source,esql] ---- -include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation] +MEDIAN_ABSOLUTE_DEVIATION(column) ---- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation-result] -|=== + +*Parameters* + +`column`:: +Column from which to return the median absolute deviation. + +*Description* + +Returns the median absolute deviation, a measure of variability. It is a robust +statistic, meaning that it is useful for describing data that may have outliers, +or may not be normally distributed. For such data it can be more descriptive +than standard deviation. + +It is calculated as the median of each data point's deviation from the median of +the entire sample. 
That is, for a random variable `X`, the median absolute +deviation is `median(|median(X) - X|)`. NOTE: Like <>, `MEDIAN_ABSOLUTE_DEVIATION` is <>. @@ -27,3 +33,14 @@ NOTE: Like <>, `MEDIAN_ABSOLUTE_DEVIATION` is `MEDIAN_ABSOLUTE_DEVIATION` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. This means you can get slightly different results using the same data. ==== + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation-result] +|=== diff --git a/docs/reference/esql/functions/median.asciidoc b/docs/reference/esql/functions/median.asciidoc index 5a0d0c049602e..17b51d9c50b26 100644 --- a/docs/reference/esql/functions/median.asciidoc +++ b/docs/reference/esql/functions/median.asciidoc @@ -1,17 +1,23 @@ [discrete] [[esql-agg-median]] === `MEDIAN` -The value that is greater than half of all values and less than half of -all values, also known as the 50% <>. -[source.merge.styled,esql] +*Syntax* + +[source,esql] ---- -include::{esql-specs}/stats_percentile.csv-spec[tag=median] +MEDIAN(column) ---- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_percentile.csv-spec[tag=median-result] -|=== + +*Parameters* + +`column`:: +Column from which to return the median value. + +*Description* + +Returns the value that is greater than half of all values and less than half of +all values, also known as the 50% <>. NOTE: Like <>, `MEDIAN` is <>. 
@@ -20,3 +26,14 @@ NOTE: Like <>, `MEDIAN` is <> on them first: +To concat non-string columns, call <> first: [source.merge.styled,esql] ---- @@ -26,7 +45,3 @@ include::{esql-specs}/string.csv-spec[tag=mv_concat-to_string] |=== include::{esql-specs}/string.csv-spec[tag=mv_concat-to_string-result] |=== - -Supported types: - -include::types/mv_concat.asciidoc[] diff --git a/docs/reference/esql/functions/mv_count.asciidoc b/docs/reference/esql/functions/mv_count.asciidoc index e6a61cd6e9c63..0545335556030 100644 --- a/docs/reference/esql/functions/mv_count.asciidoc +++ b/docs/reference/esql/functions/mv_count.asciidoc @@ -1,11 +1,27 @@ [discrete] [[esql-mv_count]] === `MV_COUNT` + +*Syntax* + [.text-center] image::esql/functions/signature/mv_count.svg[Embedded,opts=inline] -Converts a multivalued field into a single valued field containing a count of the number -of values: +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Converts a multivalued expression into a single valued column containing a count +of the number of values. + +*Supported types* + +include::types/mv_count.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,7 +31,3 @@ include::{esql-specs}/string.csv-spec[tag=mv_count] |=== include::{esql-specs}/string.csv-spec[tag=mv_count-result] |=== - -Supported types: - -include::types/mv_count.asciidoc[] diff --git a/docs/reference/esql/functions/mv_dedupe.asciidoc b/docs/reference/esql/functions/mv_dedupe.asciidoc index c85c6ddff4354..09b3827c45e45 100644 --- a/docs/reference/esql/functions/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/mv_dedupe.asciidoc @@ -1,10 +1,28 @@ [discrete] [[esql-mv_dedupe]] === `MV_DEDUPE` + +*Syntax* + [.text-center] image::esql/functions/signature/mv_dedupe.svg[Embedded,opts=inline] -Removes duplicates from a multivalued field. For example: +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Removes duplicates from a multivalue expression. 
+ +NOTE: `MV_DEDUPE` may, but won't always, sort the values in the column. + +*Supported types* + +include::types/mv_dedupe.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,9 +32,3 @@ include::{esql-specs}/string.csv-spec[tag=mv_dedupe] |=== include::{esql-specs}/string.csv-spec[tag=mv_dedupe-result] |=== - -Supported types: - -include::types/mv_dedupe.asciidoc[] - -NOTE: `MV_DEDUPE` may, but won't always, sort the values in the field. diff --git a/docs/reference/esql/functions/mv_first.asciidoc b/docs/reference/esql/functions/mv_first.asciidoc index 42ac8930136cc..13d21b15f958e 100644 --- a/docs/reference/esql/functions/mv_first.asciidoc +++ b/docs/reference/esql/functions/mv_first.asciidoc @@ -1,11 +1,34 @@ [discrete] [[esql-mv_first]] === `MV_FIRST` + +*Syntax* + [.text-center] image::esql/functions/signature/mv_first.svg[Embedded,opts=inline] -Converts a multivalued field into a single valued field containing the first value. This is most -useful when reading from a function that emits multivalued fields in a known order like <>: +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Converts a multivalued expression into a single valued column containing the +first value. This is most useful when reading from a function that emits +multivalued columns in a known order like <>. + +The order that <> are read from +underlying storage is not guaranteed. It is *frequently* ascending, but don't +rely on that. If you need the minimum value use <> instead of +`MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a +performance benefit to `MV_FIRST`. + +*Supported types* + +include::types/mv_first.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,13 +38,3 @@ include::{esql-specs}/string.csv-spec[tag=mv_first] |=== include::{esql-specs}/string.csv-spec[tag=mv_first-result] |=== - -The order that <> are read from underlying storage is not -guaranteed. It is *frequently* ascending, but don't rely on that. 
If you need the minimum field value -use <> instead of `MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't -a performance benefit to `MV_FIRST`. `MV_FIRST` is mostly useful with functions that create multivalued -fields like `SPLIT`. - -Supported types: - -include::types/mv_first.asciidoc[] diff --git a/docs/reference/esql/functions/mv_last.asciidoc b/docs/reference/esql/functions/mv_last.asciidoc index aa6fc40d0af07..ee6a4a8fed8ba 100644 --- a/docs/reference/esql/functions/mv_last.asciidoc +++ b/docs/reference/esql/functions/mv_last.asciidoc @@ -1,11 +1,34 @@ [discrete] [[esql-mv_last]] === `MV_LAST` + +*Syntax* + [.text-center] image::esql/functions/signature/mv_last.svg[Embedded,opts=inline] -Converts a multivalued field into a single valued field containing the last value. This is most -useful when reading from a function that emits multivalued fields in a known order like <>: +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Converts a multivalue expression into a single valued column containing the last +value. This is most useful when reading from a function that emits multivalued +columns in a known order like <>. + +The order that <> are read from +underlying storage is not guaranteed. It is *frequently* ascending, but don't +rely on that. If you need the maximum value use <> instead of +`MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't a +performance benefit to `MV_LAST`. + +*Supported types* + +include::types/mv_last.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,13 +38,3 @@ include::{esql-specs}/string.csv-spec[tag=mv_last] |=== include::{esql-specs}/string.csv-spec[tag=mv_last-result] |=== - -The order that <> are read from underlying storage is not -guaranteed. It is *frequently* ascending, but don't rely on that. If you need the maximum field value -use <> instead of `MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't -a performance benefit to `MV_LAST`. 
`MV_LAST` is mostly useful with functions that create multivalued -fields like `SPLIT`. - -Supported types: - -include::types/mv_last.asciidoc[] diff --git a/docs/reference/esql/functions/mv_max.asciidoc b/docs/reference/esql/functions/mv_max.asciidoc index ed433b64a2813..e13e61e0d123d 100644 --- a/docs/reference/esql/functions/mv_max.asciidoc +++ b/docs/reference/esql/functions/mv_max.asciidoc @@ -1,10 +1,27 @@ [discrete] [[esql-mv_max]] === `MV_MAX` + +*Syntax* + [.text-center] image::esql/functions/signature/mv_max.svg[Embedded,opts=inline] -Converts a multivalued field into a single valued field containing the maximum value. For example: +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Converts a multivalued expression into a single valued column containing the +maximum value. + +*Supported types* + +include::types/mv_max.asciidoc[] + +*Examples* [source.merge.styled,esql] ---- @@ -15,8 +32,8 @@ include::{esql-specs}/math.csv-spec[tag=mv_max] include::{esql-specs}/math.csv-spec[tag=mv_max-result] |=== -It can be used by any field type, including `keyword` fields. In that case picks the -last string, comparing their utf-8 representation byte by byte: +It can be used by any column type, including `keyword` columns. In that case +it picks the last string, comparing their utf-8 representation byte by byte: [source.merge.styled,esql] ---- @@ -26,7 +43,3 @@ include::{esql-specs}/string.csv-spec[tag=mv_max] |=== include::{esql-specs}/string.csv-spec[tag=mv_max-result] |=== - -Supported types: - -include::types/mv_max.asciidoc[] diff --git a/docs/reference/esql/functions/mv_median.asciidoc b/docs/reference/esql/functions/mv_median.asciidoc index c84cf7a895da5..05c54342c0f74 100644 --- a/docs/reference/esql/functions/mv_median.asciidoc +++ b/docs/reference/esql/functions/mv_median.asciidoc @@ -1,7 +1,27 @@ [discrete] [[esql-mv_median]] === `MV_MEDIAN` -Converts a multivalued field into a single valued field containing the median value. 
For example: + +[source,esql] +---- +MV_MEDIAN(v) +---- + +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Converts a multivalued column into a single valued column containing the median +value. + +*Supported types* + +include::types/mv_median.asciidoc[] + +*Examples* [source.merge.styled,esql] ---- @@ -12,9 +32,9 @@ include::{esql-specs}/math.csv-spec[tag=mv_median] include::{esql-specs}/math.csv-spec[tag=mv_median-result] |=== -It can be used by any numeric field type and returns a value of the same type. If the -row has an even number of values for a column the result will be the average of the -middle two entries. If the field is not floating point then the average rounds *down*: +If the row has an even number of values for a column, the result will be the +average of the middle two entries. If the column is not floating point, the +average rounds *down*: [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/mv_min.asciidoc b/docs/reference/esql/functions/mv_min.asciidoc index b0c8dd51c97fc..b851f480fd619 100644 --- a/docs/reference/esql/functions/mv_min.asciidoc +++ b/docs/reference/esql/functions/mv_min.asciidoc @@ -1,10 +1,27 @@ [discrete] [[esql-mv_min]] === `MV_MIN` + +*Syntax* + [.text-center] image::esql/functions/signature/mv_min.svg[Embedded,opts=inline] -Converts a multivalued field into a single valued field containing the minimum value. For example: +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Converts a multivalued expression into a single valued column containing the +minimum value. + +*Supported types* + +include::types/mv_min.asciidoc[] + +*Examples* [source.merge.styled,esql] ---- @@ -15,8 +32,8 @@ include::{esql-specs}/math.csv-spec[tag=mv_min] include::{esql-specs}/math.csv-spec[tag=mv_min-result] |=== -It can be used by any field type, including `keyword` fields. 
In that case picks the -first string, comparing their utf-8 representation byte by byte: +It can be used by any column type, including `keyword` columns. In that case, +it picks the first string, comparing their utf-8 representation byte by byte: [source.merge.styled,esql] ---- @@ -26,7 +43,3 @@ include::{esql-specs}/string.csv-spec[tag=mv_min] |=== include::{esql-specs}/string.csv-spec[tag=mv_min-result] |=== - -Supported types: - -include::types/mv_min.asciidoc[] diff --git a/docs/reference/esql/functions/mv_sum.asciidoc b/docs/reference/esql/functions/mv_sum.asciidoc index 646af03305954..bc252bc9d3fa0 100644 --- a/docs/reference/esql/functions/mv_sum.asciidoc +++ b/docs/reference/esql/functions/mv_sum.asciidoc @@ -1,8 +1,27 @@ [discrete] [[esql-mv_sum]] === `MV_SUM` -Converts a multivalued field into a single valued field containing the sum -of all of the values. For example: + +[source,esql] +---- +MV_SUM(v) +---- + +*Parameters* + +`v`:: +Multivalue expression. + +*Description* + +Converts a multivalued column into a single valued column containing the sum +of all of the values. + +*Supported types* + +include::types/mv_sum.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -12,5 +31,3 @@ include::{esql-specs}/math.csv-spec[tag=mv_sum] |=== include::{esql-specs}/math.csv-spec[tag=mv_sum-result] |=== - -NOTE: The input type can be any number and the output type is the same as the input type. diff --git a/docs/reference/esql/functions/now.asciidoc b/docs/reference/esql/functions/now.asciidoc index 5d33449a1e906..3c46f557acd1f 100644 --- a/docs/reference/esql/functions/now.asciidoc +++ b/docs/reference/esql/functions/now.asciidoc @@ -1,9 +1,28 @@ [discrete] [[esql-now]] === `NOW` + +*Syntax* + +[source,esql] +---- +NOW() +---- + +*Description* + Returns current date and time. 
+*Example* + [source,esql] ---- -ROW current_date = NOW() +include::{esql-specs}/date.csv-spec[tag=docsNow] +---- + +To retrieve logs from the last hour: + +[source,esql] ---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- \ No newline at end of file diff --git a/docs/reference/esql/functions/percentile.asciidoc b/docs/reference/esql/functions/percentile.asciidoc index 917a4a81e7b4f..ab3f14af70486 100644 --- a/docs/reference/esql/functions/percentile.asciidoc +++ b/docs/reference/esql/functions/percentile.asciidoc @@ -1,18 +1,27 @@ [discrete] [[esql-agg-percentile]] === `PERCENTILE` -The value at which a certain percentage of observed values occur. For example, -the 95th percentile is the value which is greater than 95% of the observed values and -the 50th percentile is the <>. -[source.merge.styled,esql] +*Syntax* + +[source,esql] ---- -include::{esql-specs}/stats_percentile.csv-spec[tag=percentile] +PERCENTILE(column, percentile) ---- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_percentile.csv-spec[tag=percentile-result] -|=== + +*Parameters* + +`column`:: +Column to convert from multiple values to single value. + +`percentile`:: +A constant numeric expression. + +*Description* + +Returns the value at which a certain percentage of observed values occur. For +example, the 95th percentile is the value which is greater than 95% of the +observed values and the 50th percentile is the <>. [discrete] [[esql-agg-percentile-approximate]] @@ -26,5 +35,13 @@ include::../../aggregations/metrics/percentile-aggregation.asciidoc[tag=approxim This means you can get slightly different results using the same data. 
==== +*Example* - +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=percentile] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=percentile-result] +|=== diff --git a/docs/reference/esql/functions/pi.asciidoc b/docs/reference/esql/functions/pi.asciidoc index cd630aaabadcd..fb88cbffc99d0 100644 --- a/docs/reference/esql/functions/pi.asciidoc +++ b/docs/reference/esql/functions/pi.asciidoc @@ -1,10 +1,17 @@ [discrete] [[esql-pi]] === `PI` + +*Syntax* + [.text-center] image::esql/functions/signature/pi.svg[Embedded,opts=inline] -The {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. +*Description* + +Returns the {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. + +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/pow.asciidoc b/docs/reference/esql/functions/pow.asciidoc index b13151c8cbd76..8c31bd21e8a46 100644 --- a/docs/reference/esql/functions/pow.asciidoc +++ b/docs/reference/esql/functions/pow.asciidoc @@ -1,12 +1,31 @@ [discrete] [[esql-pow]] === `POW` + +*Syntax* + [.text-center] image::esql/functions/signature/pow.svg[Embedded,opts=inline] -Returns the value of a base (first argument) raised to the power of an exponent (second argument). -Both arguments must be numeric. The output is always a double. Note that it is still possible to overflow -a double result here; in that case, null will be returned. +*Parameters* + +`base`:: +Numeric expression. If `null`, the function returns `null`. + +`exponent`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +Returns the value of `base` raised to the power of `exponent`. Both arguments +must be numeric. The output is always a double. Note that it is still possible +to overflow a double result here; in that case, null will be returned. 
+ +*Supported types* + +include::types/pow.asciidoc[] + +*Examples* [source.merge.styled,esql] ---- @@ -17,10 +36,6 @@ include::{esql-specs}/math.csv-spec[tag=powDI] include::{esql-specs}/math.csv-spec[tag=powDI-result] |=== - -[discrete] -==== Fractional exponents - The exponent can be a fraction, which is similar to performing a root. For example, the exponent of `0.5` will give the square root of the base: @@ -32,10 +47,3 @@ include::{esql-specs}/math.csv-spec[tag=powID-sqrt] |=== include::{esql-specs}/math.csv-spec[tag=powID-sqrt-result] |=== - -[discrete] -==== Table of supported input and output types - -For clarity, the following table describes the output result type for all combinations of numeric input types: - -include::types/pow.asciidoc[] diff --git a/docs/reference/esql/functions/replace.asciidoc b/docs/reference/esql/functions/replace.asciidoc index 9bc0f85fdddce..05856829eb193 100644 --- a/docs/reference/esql/functions/replace.asciidoc +++ b/docs/reference/esql/functions/replace.asciidoc @@ -1,11 +1,38 @@ [discrete] [[esql-replace]] === `REPLACE` -The function substitutes in the string (1st argument) any match of the regular expression (2nd argument) with the replacement string (3rd argument). -If any of the arguments are `NULL`, the result is `NULL`. +*Syntax* -. This example replaces an occurrence of the word "World" with the word "Universe": +[.text-center] +image::esql/functions/signature/replace.svg[Embedded,opts=inline] + +*Parameters* + +`str`:: +String expression. + +`regex`:: +Regular expression. + +`newStr`:: +Replacement string. + +*Description* + +The function substitutes in the string `str` any match of the regular expression +`regex` with the replacement string `newStr`. + +If any of the arguments is `null`, the result is `null`. 
+ +*Supported types* + +include::types/replace.asciidoc[] + +*Example* + +This example replaces any occurrence of the word "World" with the word +"Universe": [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/right.asciidoc b/docs/reference/esql/functions/right.asciidoc index a0f18192d410d..1b291e53729ee 100644 --- a/docs/reference/esql/functions/right.asciidoc +++ b/docs/reference/esql/functions/right.asciidoc @@ -1,10 +1,30 @@ [discrete] [[esql-right]] === `RIGHT` + +*Syntax* + [.text-center] image::esql/functions/signature/right.svg[Embedded,opts=inline] -Return the substring that extracts 'length' chars from the 'string' starting from the right. +*Parameters* + +`str`:: +The string from which to return a substring. + +`length`:: +The number of characters to return. + +*Description* + +Returns the substring that extracts 'length' chars from 'str' starting +from the right. + +*Supported types* + +include::types/right.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,7 +34,3 @@ include::{esql-specs}/string.csv-spec[tag=right] |=== include::{esql-specs}/string.csv-spec[tag=right-result] |=== - -Supported types: - -include::types/right.asciidoc[] diff --git a/docs/reference/esql/functions/round.asciidoc b/docs/reference/esql/functions/round.asciidoc index 4ec71cf682d0f..7f1285e85f664 100644 --- a/docs/reference/esql/functions/round.asciidoc +++ b/docs/reference/esql/functions/round.asciidoc @@ -1,10 +1,31 @@ [discrete] [[esql-round]] === `ROUND` +*Syntax* + +[.text-center] +image::esql/functions/signature/round.svg[Embedded,opts=inline] + +*Parameters* + +`value`:: +Numeric expression. If `null`, the function returns `null`. + +`decimals`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +Rounds a number to the closest number with the specified number of digits. Defaults to 0 digits if no number of digits is provided.
If the specified number of digits is negative, rounds to the number of digits left of the decimal point. +*Supported types* + +include::types/round.asciidoc[] + +*Example* + [source.merge.styled,esql] ---- include::{esql-specs}/docs.csv-spec[tag=round] diff --git a/docs/reference/esql/functions/rtrim.asciidoc b/docs/reference/esql/functions/rtrim.asciidoc index 8eb0494e90d9e..588b7b9fc5433 100644 --- a/docs/reference/esql/functions/rtrim.asciidoc +++ b/docs/reference/esql/functions/rtrim.asciidoc @@ -1,11 +1,27 @@ [discrete] [[esql-rtrim]] === `RTRIM` + +*Syntax* + [.text-center] image::esql/functions/signature/rtrim.svg[Embedded,opts=inline] +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +*Description* + Removes trailing whitespaces from strings. +*Supported types* + +include::types/rtrim.asciidoc[] + +*Example* + [source.merge.styled,esql] ---- include::{esql-specs}/string.csv-spec[tag=rtrim] @@ -14,7 +30,3 @@ include::{esql-specs}/string.csv-spec[tag=rtrim] |=== include::{esql-specs}/string.csv-spec[tag=rtrim-result] |=== - -Supported types: - -include::types/rtrim.asciidoc[] diff --git a/docs/reference/esql/functions/signature/coalesce.svg b/docs/reference/esql/functions/signature/coalesce.svg index bfe80812327a9..22a70efead49c 100644 --- a/docs/reference/esql/functions/signature/coalesce.svg +++ b/docs/reference/esql/functions/signature/coalesce.svg @@ -1 +1 @@ -COALESCE(arg1,arg2) \ No newline at end of file +COALESCE(expression,expressionX) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/concat.svg b/docs/reference/esql/functions/signature/concat.svg index 1ca5a9bc2d06f..3ad2ae37b11c3 100644 --- a/docs/reference/esql/functions/signature/concat.svg +++ b/docs/reference/esql/functions/signature/concat.svg @@ -1 +1 @@ -CONCAT(arg1,arg2) \ No newline at end of file +CONCAT(first,rest) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/date_extract.svg 
b/docs/reference/esql/functions/signature/date_extract.svg index ec69633c02e8b..397cdd400d88c 100644 --- a/docs/reference/esql/functions/signature/date_extract.svg +++ b/docs/reference/esql/functions/signature/date_extract.svg @@ -1 +1 @@ -DATE_EXTRACT(arg1,arg2) \ No newline at end of file +DATE_EXTRACT(date_part,field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/ends_with.svg b/docs/reference/esql/functions/signature/ends_with.svg index bf2cb47ed0be0..575452e1bb8c6 100644 --- a/docs/reference/esql/functions/signature/ends_with.svg +++ b/docs/reference/esql/functions/signature/ends_with.svg @@ -1 +1 @@ -ENDS_WITH(arg1,arg2) \ No newline at end of file +ENDS_WITH(str,suffix) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/left.svg b/docs/reference/esql/functions/signature/left.svg index ec14bf8c72131..75704982af004 100644 --- a/docs/reference/esql/functions/signature/left.svg +++ b/docs/reference/esql/functions/signature/left.svg @@ -1 +1 @@ -LEFT(string,length) \ No newline at end of file +LEFT(str,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/length.svg b/docs/reference/esql/functions/signature/length.svg index 65c3f4a9db89a..d199f1a9a0170 100644 --- a/docs/reference/esql/functions/signature/length.svg +++ b/docs/reference/esql/functions/signature/length.svg @@ -1 +1 @@ -LENGTH(arg1) \ No newline at end of file +LENGTH(str) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_avg.svg b/docs/reference/esql/functions/signature/mv_avg.svg index 4fc02033e4fdb..4c2371eac0b44 100644 --- a/docs/reference/esql/functions/signature/mv_avg.svg +++ b/docs/reference/esql/functions/signature/mv_avg.svg @@ -1 +1 @@ -MV_AVG(arg1) \ No newline at end of file +MV_AVG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_median.svg b/docs/reference/esql/functions/signature/mv_median.svg index 
3fd5dd009b143..b287fde6dd97e 100644 --- a/docs/reference/esql/functions/signature/mv_median.svg +++ b/docs/reference/esql/functions/signature/mv_median.svg @@ -1 +1 @@ -MV_MEDIAN(arg1) \ No newline at end of file +MV_MEDIAN(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_sum.svg b/docs/reference/esql/functions/signature/mv_sum.svg index ff0fd374025ac..3e3fbd30355b1 100644 --- a/docs/reference/esql/functions/signature/mv_sum.svg +++ b/docs/reference/esql/functions/signature/mv_sum.svg @@ -1 +1 @@ -MV_SUM(arg1) \ No newline at end of file +MV_SUM(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/replace.svg b/docs/reference/esql/functions/signature/replace.svg index 7c86c00d019cb..bbcd11bcc0ab6 100644 --- a/docs/reference/esql/functions/signature/replace.svg +++ b/docs/reference/esql/functions/signature/replace.svg @@ -1 +1 @@ -REPLACE(arg1,arg2,arg3) \ No newline at end of file +REPLACE(str,regex,newStr) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/right.svg b/docs/reference/esql/functions/signature/right.svg index 0afa5dbf01f16..969a6c9442479 100644 --- a/docs/reference/esql/functions/signature/right.svg +++ b/docs/reference/esql/functions/signature/right.svg @@ -1 +1 @@ -RIGHT(string,length) \ No newline at end of file +RIGHT(str,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/round.svg b/docs/reference/esql/functions/signature/round.svg index 42a2da87527d9..9da0b9d11213e 100644 --- a/docs/reference/esql/functions/signature/round.svg +++ b/docs/reference/esql/functions/signature/round.svg @@ -1 +1 @@ -ROUND(arg1,arg2) \ No newline at end of file +ROUND(value,decimals) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/split.svg b/docs/reference/esql/functions/signature/split.svg index cd31aef97f8a5..1213f6041b0c4 100644 --- a/docs/reference/esql/functions/signature/split.svg +++ 
b/docs/reference/esql/functions/signature/split.svg @@ -1 +1 @@ -SPLIT(arg1,arg2) \ No newline at end of file +SPLIT(str,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/starts_with.svg b/docs/reference/esql/functions/signature/starts_with.svg index 0a2d3a1ef0c36..85fcbfa766ae4 100644 --- a/docs/reference/esql/functions/signature/starts_with.svg +++ b/docs/reference/esql/functions/signature/starts_with.svg @@ -1 +1 @@ -STARTS_WITH(arg1,arg2) \ No newline at end of file +STARTS_WITH(str,prefix) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/substring.svg b/docs/reference/esql/functions/signature/substring.svg index 6df5da30f67ed..1f9f6e8c3afa0 100644 --- a/docs/reference/esql/functions/signature/substring.svg +++ b/docs/reference/esql/functions/signature/substring.svg @@ -1 +1 @@ -SUBSTRING(arg1,arg2,arg3) \ No newline at end of file +SUBSTRING(str,start,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_lower.svg b/docs/reference/esql/functions/signature/to_lower.svg new file mode 100644 index 0000000000000..8d49539f0a0c8 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_lower.svg @@ -0,0 +1 @@ +TO_LOWER(str) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_upper.svg b/docs/reference/esql/functions/signature/to_upper.svg new file mode 100644 index 0000000000000..f7cea528331a2 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_upper.svg @@ -0,0 +1 @@ +TO_UPPER(str) \ No newline at end of file diff --git a/docs/reference/esql/functions/sin.asciidoc b/docs/reference/esql/functions/sin.asciidoc index d948bf2ec39a3..e6a8e0cf9331f 100644 --- a/docs/reference/esql/functions/sin.asciidoc +++ b/docs/reference/esql/functions/sin.asciidoc @@ -1,10 +1,27 @@ [discrete] [[esql-sin]] === `SIN` + +*Syntax* + [.text-center] image::esql/functions/signature/sin.svg[Embedded,opts=inline] 
-https://en.wikipedia.org/wiki/Sine_and_cosine[Sine] trigonometric function. Input expected in radians. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +{wikipedia}/Sine_and_cosine[Sine] trigonometric function. Input expected in +radians. + +*Supported types* + +include::types/sin.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,7 +31,3 @@ include::{esql-specs}/floats.csv-spec[tag=sin] |=== include::{esql-specs}/floats.csv-spec[tag=sin-result] |=== - -Supported types: - -include::types/sin.asciidoc[] diff --git a/docs/reference/esql/functions/sinh.asciidoc b/docs/reference/esql/functions/sinh.asciidoc index 11d1ea29bffef..683ae6962c2fd 100644 --- a/docs/reference/esql/functions/sinh.asciidoc +++ b/docs/reference/esql/functions/sinh.asciidoc @@ -1,10 +1,26 @@ [discrete] [[esql-sinh]] === `SINH` + +*Syntax* + [.text-center] image::esql/functions/signature/sinh.svg[Embedded,opts=inline] -https://en.wikipedia.org/wiki/Hyperbolic_functions[Sine] hyperbolic function. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +{wikipedia}/Hyperbolic_functions[Sine] hyperbolic function. + +*Supported types* + +include::types/sinh.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,7 +30,3 @@ include::{esql-specs}/floats.csv-spec[tag=sinh] |=== include::{esql-specs}/floats.csv-spec[tag=sinh-result] |=== - -Supported types: - -include::types/sinh.asciidoc[] diff --git a/docs/reference/esql/functions/split.asciidoc b/docs/reference/esql/functions/split.asciidoc index a6f8869bf89ca..0a4ce584d01da 100644 --- a/docs/reference/esql/functions/split.asciidoc +++ b/docs/reference/esql/functions/split.asciidoc @@ -1,18 +1,33 @@ [discrete] [[esql-split]] === `SPLIT` -Split a single valued string into multiple strings. 
For example: -[source,esql] +[.text-center] +image::esql/functions/signature/split.svg[Embedded,opts=inline] + +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +`delim`:: +Delimiter. Only single byte delimiters are currently supported. + +*Description* + +Splits a single valued string into multiple strings. + +*Supported types* + +include::types/split.asciidoc[] + +*Example* + +[source.merge.styled,esql] ---- include::{esql-specs}/string.csv-spec[tag=split] ---- - -Which splits `"foo;bar;baz;qux;quux;corge"` on `;` and returns an array: - -[%header,format=dsv,separator=|] +[%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/string.csv-spec[tag=split-result] |=== - -WARNING: Only single byte delimiters are currently supported. diff --git a/docs/reference/esql/functions/sqrt.asciidoc b/docs/reference/esql/functions/sqrt.asciidoc index 02f7060089971..faf504a6b0af4 100644 --- a/docs/reference/esql/functions/sqrt.asciidoc +++ b/docs/reference/esql/functions/sqrt.asciidoc @@ -1,13 +1,30 @@ [discrete] [[esql-sqrt]] === `SQRT` + +*Syntax* + [.text-center] image::esql/functions/signature/sqrt.svg[Embedded,opts=inline] -Returns the square root of a number. The input can be any numeric value, the return value -is always a double. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +Returns the square root of a number. The input can be any numeric value, the +return value is always a double. + +Square roots of negative numbers are NaN. Square roots of infinities are +infinite. -Square roots of negative numbers are NaN. Square roots of infinites are infinite.
+*Supported types* + +include::types/sqrt.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -17,7 +34,3 @@ include::{esql-specs}/math.csv-spec[tag=sqrt] |=== include::{esql-specs}/math.csv-spec[tag=sqrt-result] |=== - -Supported types: - -include::types/sqrt.asciidoc[] diff --git a/docs/reference/esql/functions/st_centroid.asciidoc b/docs/reference/esql/functions/st_centroid.asciidoc new file mode 100644 index 0000000000000..abed1e71eab8f --- /dev/null +++ b/docs/reference/esql/functions/st_centroid.asciidoc @@ -0,0 +1,18 @@ +[discrete] +[[esql-agg-st-centroid]] +=== `ST_CENTROID` + +Calculate the spatial centroid over a field with spatial point geometry type. + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports-result] +|=== + +Supported types: + +include::types/st_centroid.asciidoc[] diff --git a/docs/reference/esql/functions/starts_with.asciidoc b/docs/reference/esql/functions/starts_with.asciidoc index f98a76ef68206..4d45e89882400 100644 --- a/docs/reference/esql/functions/starts_with.asciidoc +++ b/docs/reference/esql/functions/starts_with.asciidoc @@ -1,11 +1,30 @@ [discrete] [[esql-starts_with]] === `STARTS_WITH` + +*Syntax* + [.text-center] image::esql/functions/signature/starts_with.svg[Embedded,opts=inline] +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +`prefix`:: +String expression. If `null`, the function returns `null`. + +*Description* + Returns a boolean that indicates whether a keyword string starts with another -string: +string. 
+ +*Supported types* + +include::types/starts_with.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,7 +34,3 @@ include::{esql-specs}/docs.csv-spec[tag=startsWith] |=== include::{esql-specs}/docs.csv-spec[tag=startsWith-result] |=== - -Supported types: - -include::types/starts_with.asciidoc[] diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index b209244b93297..e9fe04ce15761 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -17,6 +17,8 @@ * <> * <> * <> +* <> +* <> * <> // end::string_list[] @@ -29,4 +31,6 @@ include::right.asciidoc[] include::rtrim.asciidoc[] include::split.asciidoc[] include::substring.asciidoc[] +include::to_lower.asciidoc[] +include::to_upper.asciidoc[] include::trim.asciidoc[] diff --git a/docs/reference/esql/functions/substring.asciidoc b/docs/reference/esql/functions/substring.asciidoc index 8b8234de05bba..73df7a19aa6b7 100644 --- a/docs/reference/esql/functions/substring.asciidoc +++ b/docs/reference/esql/functions/substring.asciidoc @@ -1,8 +1,36 @@ [discrete] [[esql-substring]] === `SUBSTRING` + +*Syntax* + +[.text-center] +image::esql/functions/signature/substring.svg[Embedded,opts=inline] + +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +`start`:: +Start position. + +`length`:: +Length of the substring from the start position. Optional; if omitted, all +positions after `start` are returned. + +*Description* + Returns a substring of a string, specified by a start position and an optional -length. This example returns the first three characters of every last name: +length. 
+ +*Supported types* + +include::types/substring.asciidoc[] + +*Examples* + +This example returns the first three characters of every last name: [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/sum.asciidoc b/docs/reference/esql/functions/sum.asciidoc index abf790040114d..e88ebbeb3c771 100644 --- a/docs/reference/esql/functions/sum.asciidoc +++ b/docs/reference/esql/functions/sum.asciidoc @@ -1,7 +1,22 @@ [discrete] [[esql-agg-sum]] === `SUM` -The sum of a numeric field. + +*Syntax* + +[source,esql] +---- +SUM(column) +---- + +`column`:: +Numeric column. + +*Description* + +Returns the sum of a numeric column. + +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/tan.asciidoc b/docs/reference/esql/functions/tan.asciidoc index 03e450ff23b0e..cc06421616fc1 100644 --- a/docs/reference/esql/functions/tan.asciidoc +++ b/docs/reference/esql/functions/tan.asciidoc @@ -1,10 +1,27 @@ [discrete] [[esql-tan]] === `TAN` + +*Syntax* + [.text-center] image::esql/functions/signature/tan.svg[Embedded,opts=inline] -https://en.wikipedia.org/wiki/Sine_and_cosine[Tangent] trigonometric function. Input expected in radians. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +{wikipedia}/Sine_and_cosine[Tangent] trigonometric function. Input expected in +radians. 
+ +*Supported types* + +include::types/tan.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,7 +31,3 @@ include::{esql-specs}/floats.csv-spec[tag=tan] |=== include::{esql-specs}/floats.csv-spec[tag=tan-result] |=== - -Supported types: - -include::types/tan.asciidoc[] diff --git a/docs/reference/esql/functions/tanh.asciidoc b/docs/reference/esql/functions/tanh.asciidoc index 218a0155d861c..a21354d23ba50 100644 --- a/docs/reference/esql/functions/tanh.asciidoc +++ b/docs/reference/esql/functions/tanh.asciidoc @@ -1,10 +1,26 @@ [discrete] [[esql-tanh]] === `TANH` + +*Syntax* + [.text-center] image::esql/functions/signature/tanh.svg[Embedded,opts=inline] -https://en.wikipedia.org/wiki/Hyperbolic_functions[Tangent] hyperbolic function. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +{wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function. + +*Supported types* + +include::types/tanh.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,7 +30,3 @@ include::{esql-specs}/floats.csv-spec[tag=tanh] |=== include::{esql-specs}/floats.csv-spec[tag=tanh-result] |=== - -Supported types: - -include::types/tanh.asciidoc[] diff --git a/docs/reference/esql/functions/tau.asciidoc b/docs/reference/esql/functions/tau.asciidoc index 61f352b0db8de..d9720eb34d795 100644 --- a/docs/reference/esql/functions/tau.asciidoc +++ b/docs/reference/esql/functions/tau.asciidoc @@ -1,10 +1,18 @@ [discrete] [[esql-tau]] === `TAU` + +*Syntax* + [.text-center] image::esql/functions/signature/tau.svg[Embedded,opts=inline] -The https://tauday.com/tau-manifesto[ratio] of a circle's circumference to its radius. +*Description* + +Returns the https://tauday.com/tau-manifesto[ratio] of a circle's circumference +to its radius. 
+ +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/to_boolean.asciidoc b/docs/reference/esql/functions/to_boolean.asciidoc index 03f21a503218c..54c41625f3eba 100644 --- a/docs/reference/esql/functions/to_boolean.asciidoc +++ b/docs/reference/esql/functions/to_boolean.asciidoc @@ -1,14 +1,39 @@ [discrete] [[esql-to_boolean]] === `TO_BOOLEAN` -Converts an input value to a boolean value. -The input can be a single- or multi-valued field or an expression. The input -type must be of a string or numeric type. +*Alias* + +`TO_BOOL` + +*Syntax* + +[source,esql] +---- +TO_BOOLEAN(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts an input value to a boolean value. A string value of *"true"* will be case-insensitive converted to the Boolean *true*. For anything else, including the empty string, the function will -return *false*. For example: +return *false*. + +The numerical value of *0* will be converted to *false*, anything else will be +converted to *true*. + +*Supported types* + +The input type must be of a string or numeric type. + +*Example* [source.merge.styled,esql] ---- @@ -18,8 +43,3 @@ include::{esql-specs}/boolean.csv-spec[tag=to_boolean] |=== include::{esql-specs}/boolean.csv-spec[tag=to_boolean-result] |=== - -The numerical value of *0* will be converted to *false*, anything else will be -converted to *true*. - -Alias: TO_BOOL diff --git a/docs/reference/esql/functions/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/to_cartesianpoint.asciidoc index 1fb64542681e2..223556d2c0e96 100644 --- a/docs/reference/esql/functions/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/to_cartesianpoint.asciidoc @@ -1,13 +1,31 @@ [discrete] [[esql-to_cartesianpoint]] === `TO_CARTESIANPOINT` -Converts an input value to a `point` value. -The input can be a single- or multi-valued field or an expression. 
-The input type must be a string or a cartesian `point`. +*Syntax* + +[source,esql] +---- +TO_CARTESIANPOINT(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts an input value to a `point` value. A string will only be successfully converted if it respects the -https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry[WKT Point] format: +{wikipedia}/Well-known_text_representation_of_geometry[WKT Point] format. + +*Supported types* + +include::types/to_cartesianpoint.asciidoc[] + +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/to_cartesianshape.asciidoc b/docs/reference/esql/functions/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..287d437b3906c --- /dev/null +++ b/docs/reference/esql/functions/to_cartesianshape.asciidoc @@ -0,0 +1,38 @@ +[discrete] +[[esql-to_cartesianshape]] +=== `TO_CARTESIANSHAPE` + +*Syntax* + +[source,esql] +---- +TO_CARTESIANSHAPE(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. +The input type must be a string, a `cartesian_shape` or a `cartesian_point`. + +*Description* + +Converts an input value to a `cartesian_shape` value. + +A string will only be successfully converted if it respects the +{wikipedia}/Well-known_text_representation_of_geometry[WKT] format. 
+ +*Supported types* + +include::types/to_cartesianshape.asciidoc[] + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=to_cartesianshape-str] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=to_cartesianshape-str-result] +|=== diff --git a/docs/reference/esql/functions/to_datetime.asciidoc b/docs/reference/esql/functions/to_datetime.asciidoc index 750c8025cb6c2..9baf7d818d93c 100644 --- a/docs/reference/esql/functions/to_datetime.asciidoc +++ b/docs/reference/esql/functions/to_datetime.asciidoc @@ -1,13 +1,36 @@ [discrete] [[esql-to_datetime]] === `TO_DATETIME` -Converts an input value to a date value. -The input can be a single- or multi-valued field or an expression. The input -type must be of a string or numeric type. +*Alias* + +`TO_DT` + +*Syntax* + +[source,esql] +---- +TO_DATETIME(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts an input value to a date value. A string will only be successfully converted if it's respecting the format -`yyyy-MM-dd'T'HH:mm:ss.SSS'Z'` (to convert dates in other formats, use <>). For example: +`yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`. To convert dates in other formats, use +<>. + +*Supported types* + +The input type must be of a string or numeric type. + +*Examples* [source.merge.styled,esql] ---- @@ -30,10 +53,8 @@ A following header will contain the failure reason and the offending value: `"java.lang.IllegalArgumentException: failed to parse date field [1964-06-02 00:00:00] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']"` - If the input parameter is of a numeric type, its value will be interpreted as -milliseconds since the https://en.wikipedia.org/wiki/Unix_time[Unix epoch]. -For example: +milliseconds since the {wikipedia}/Unix_time[Unix epoch]. 
For example: [source.merge.styled,esql] ---- @@ -43,5 +64,3 @@ include::{esql-specs}/date.csv-spec[tag=to_datetime-int] |=== include::{esql-specs}/date.csv-spec[tag=to_datetime-int-result] |=== - -Alias: TO_DT diff --git a/docs/reference/esql/functions/to_degrees.asciidoc b/docs/reference/esql/functions/to_degrees.asciidoc index 71b480253fe35..7b0846c9a4c3f 100644 --- a/docs/reference/esql/functions/to_degrees.asciidoc +++ b/docs/reference/esql/functions/to_degrees.asciidoc @@ -1,13 +1,29 @@ [discrete] [[esql-to_degrees]] === `TO_DEGREES` -Converts a number in https://en.wikipedia.org/wiki/Radian[radians] -to https://en.wikipedia.org/wiki/Degree_(angle)[degrees]. -The input can be a single- or multi-valued field or an expression. The input -type must be of a numeric type and result is always `double`. +*Syntax* -Example: +[source,esql] +---- +TO_DEGREES(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts a number in {wikipedia}/Radian[radians] to +{wikipedia}/Degree_(angle)[degrees]. + +*Supported types* + +The input type must be of a numeric type and result is always `double`. + +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/to_double.asciidoc b/docs/reference/esql/functions/to_double.asciidoc index 27ad84e4c7762..5d372d6c77c39 100644 --- a/docs/reference/esql/functions/to_double.asciidoc +++ b/docs/reference/esql/functions/to_double.asciidoc @@ -1,12 +1,37 @@ [discrete] [[esql-to_double]] === `TO_DOUBLE` + +*Alias* + +`TO_DBL` + +*Syntax* + +[source,esql] +---- +TO_DOUBLE(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + Converts an input value to a double value. -The input can be a single- or multi-valued field or an expression. The input -type must be of a boolean, date, string or numeric type. 
+If the input parameter is of a date type, its value will be interpreted as +milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to double. -Example: +Boolean *true* will be converted to double *1.0*, *false* to *0.0*. + +*Supported types* + +The input type must be of a boolean, date, string or numeric type. + +*Example* [source.merge.styled,esql] ---- @@ -17,22 +42,13 @@ include::{esql-specs}/floats.csv-spec[tag=to_double-str] include::{esql-specs}/floats.csv-spec[tag=to_double-str-result] |=== -Note that in this example, the last conversion of the string isn't -possible. When this happens, the result is a *null* value. In this case a -_Warning_ header is added to the response. The header will provide information -on the source of the failure: +Note that in this example, the last conversion of the string isn't possible. +When this happens, the result is a *null* value. In this case a _Warning_ header +is added to the response. The header will provide information on the source of +the failure: `"Line 1:115: evaluation of [TO_DOUBLE(str2)] failed, treating result as null. Only first 20 failures recorded."` A following header will contain the failure reason and the offending value: `"java.lang.NumberFormatException: For input string: \"foo\""` - - -If the input parameter is of a date type, its value will be interpreted as -milliseconds since the https://en.wikipedia.org/wiki/Unix_time[Unix epoch], -converted to double. - -Boolean *true* will be converted to double *1.0*, *false* to *0.0*. - -Alias: TO_DBL diff --git a/docs/reference/esql/functions/to_geopoint.asciidoc b/docs/reference/esql/functions/to_geopoint.asciidoc index 83936af0c71b3..d4d7d397d8f7b 100644 --- a/docs/reference/esql/functions/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/to_geopoint.asciidoc @@ -1,13 +1,32 @@ [discrete] [[esql-to_geopoint]] === `TO_GEOPOINT` -Converts an input value to a `geo_point` value. 
-The input can be a single- or multi-valued field or an expression. +*Syntax* + +[source,esql] +---- +TO_GEOPOINT(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. The input type must be a string or a `geo_point`. +*Description* + +Converts an input value to a `geo_point` value. + +*Supported types* + +include::types/to_geopoint.asciidoc[] + A string will only be successfully converted if it respects the -https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry[WKT Point] format: +{wikipedia}/Well-known_text_representation_of_geometry[WKT Point] format. + +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/to_geoshape.asciidoc b/docs/reference/esql/functions/to_geoshape.asciidoc new file mode 100644 index 0000000000000..8a6ec978dc7bf --- /dev/null +++ b/docs/reference/esql/functions/to_geoshape.asciidoc @@ -0,0 +1,38 @@ +[discrete] +[[esql-to_geoshape]] +=== `TO_GEOSHAPE` + +*Syntax* + +[source,esql] +---- +TO_GEOSHAPE(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. +The input type must be a string, a `geo_shape` or a `geo_point`. + +*Description* + +Converts an input value to a `geo_shape` value. + +A string will only be successfully converted if it respects the +{wikipedia}/Well-known_text_representation_of_geometry[WKT] format. 
+ +*Supported types* + +include::types/to_geoshape.asciidoc[] + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=to_geoshape-str] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=to_geoshape-str-result] +|=== diff --git a/docs/reference/esql/functions/to_integer.asciidoc b/docs/reference/esql/functions/to_integer.asciidoc index e62256930c5aa..f07bdcd231e40 100644 --- a/docs/reference/esql/functions/to_integer.asciidoc +++ b/docs/reference/esql/functions/to_integer.asciidoc @@ -1,12 +1,37 @@ [discrete] [[esql-to_integer]] === `TO_INTEGER` + +*Alias* + +`TO_INT` + +*Syntax* + +[source,esql] +---- +TO_INTEGER(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + Converts an input value to an integer value. -The input can be a single- or multi-valued field or an expression. The input -type must be of a boolean, date, string or numeric type. +If the input parameter is of a date type, its value will be interpreted as +milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to integer. -Example: +Boolean *true* will be converted to integer *1*, *false* to *0*. + +*Supported types* + +The input type must be of a boolean, date, string or numeric type. + +*Example* [source.merge.styled,esql] ---- @@ -27,12 +52,3 @@ provide information on the source of the failure: A following header will contain the failure reason and the offending value: `"org.elasticsearch.xpack.ql.InvalidArgumentException: [501379200000] out of [integer] range"` - - -If the input parameter is of a date type, its value will be interpreted as -milliseconds since the https://en.wikipedia.org/wiki/Unix_time[Unix epoch], -converted to integer. - -Boolean *true* will be converted to integer *1*, *false* to *0*. 
- -Alias: TO_INT diff --git a/docs/reference/esql/functions/to_ip.asciidoc b/docs/reference/esql/functions/to_ip.asciidoc index dea147eba1a41..28e98ea69c305 100644 --- a/docs/reference/esql/functions/to_ip.asciidoc +++ b/docs/reference/esql/functions/to_ip.asciidoc @@ -1,11 +1,24 @@ [discrete] [[esql-to_ip]] === `TO_IP` -Converts an input string to an IP value. -The input can be a single- or multi-valued field or an expression. +*Syntax* + +[source,esql] +---- +TO_IP(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts an input string to an IP value. -Example: +*Example* [source.merge.styled,esql] ---- @@ -16,10 +29,10 @@ include::{esql-specs}/ip.csv-spec[tag=to_ip] include::{esql-specs}/ip.csv-spec[tag=to_ip-result] |=== -Note that in the example above the last conversion of the string isn't -possible. When this happens, the result is a *null* value. In this case a -_Warning_ header is added to the response. The header will provide information -on the source of the failure: +Note that in this example, the last conversion of the string isn't possible. +When this happens, the result is a *null* value. In this case a _Warning_ header +is added to the response. The header will provide information on the source of +the failure: `"Line 1:68: evaluation of [TO_IP(str2)] failed, treating result as null. Only first 20 failures recorded."` diff --git a/docs/reference/esql/functions/to_long.asciidoc b/docs/reference/esql/functions/to_long.asciidoc index 9501c28a31657..04b2e3980a07d 100644 --- a/docs/reference/esql/functions/to_long.asciidoc +++ b/docs/reference/esql/functions/to_long.asciidoc @@ -1,12 +1,33 @@ [discrete] [[esql-to_long]] === `TO_LONG` + +*Syntax* + +[source,esql] +---- +TO_LONG(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + Converts an input value to a long value. 
-The input can be a single- or multi-valued field or an expression. The input -type must be of a boolean, date, string or numeric type. +If the input parameter is of a date type, its value will be interpreted as +milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to long. + +Boolean *true* will be converted to long *1*, *false* to *0*. + +*Supported types* -Example: +The input type must be of a boolean, date, string or numeric type. + +*Example* [source.merge.styled,esql] ---- @@ -27,10 +48,3 @@ on the source of the failure: A following header will contain the failure reason and the offending value: `"java.lang.NumberFormatException: For input string: \"foo\""` - - -If the input parameter is of a date type, its value will be interpreted as -milliseconds since the https://en.wikipedia.org/wiki/Unix_time[Unix epoch], -converted to long. - -Boolean *true* will be converted to long *1*, *false* to *0*. diff --git a/docs/reference/esql/functions/to_lower.asciidoc b/docs/reference/esql/functions/to_lower.asciidoc new file mode 100644 index 0000000000000..5b98d82c9a94f --- /dev/null +++ b/docs/reference/esql/functions/to_lower.asciidoc @@ -0,0 +1,32 @@ +[discrete] +[[esql-to_lower]] +=== `TO_LOWER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_lower.svg[Embedded,opts=inline] + +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +*Description* + +Returns a new string representing the input string converted to lower case. 
+ +*Supported types* + +include::types/to_lower.asciidoc[] + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=to_lower] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=to_lower-result] +|=== diff --git a/docs/reference/esql/functions/to_radians.asciidoc b/docs/reference/esql/functions/to_radians.asciidoc index 1f86f1fb983cc..f3b1fbd1f3794 100644 --- a/docs/reference/esql/functions/to_radians.asciidoc +++ b/docs/reference/esql/functions/to_radians.asciidoc @@ -1,13 +1,29 @@ [discrete] [[esql-to_radians]] === `TO_RADIANS` -Converts a number in https://en.wikipedia.org/wiki/Degree_(angle)[degrees] to -https://en.wikipedia.org/wiki/Radian[radians]. -The input can be a single- or multi-valued field or an expression. The input -type must be of a numeric type and result is always `double`. +*Syntax* -Example: +[source,esql] +---- +TO_RADIANS(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts a number in {wikipedia}/Degree_(angle)[degrees] to +{wikipedia}/Radian[radians]. + +*Supported types* + +The input type must be of a numeric type and result is always `double`. + +*Example* [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/to_string.asciidoc b/docs/reference/esql/functions/to_string.asciidoc index d03b6511b8de5..e771915977d97 100644 --- a/docs/reference/esql/functions/to_string.asciidoc +++ b/docs/reference/esql/functions/to_string.asciidoc @@ -1,10 +1,28 @@ [discrete] [[esql-to_string]] === `TO_STRING` + +*Alias* + +`TO_STR` + [.text-center] image::esql/functions/signature/to_string.svg[Embedded,opts=inline] -Converts a field into a string. For example: +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts an input value into a string. 
+ +*Supported types* + +include::types/to_string.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -25,9 +43,3 @@ include::{esql-specs}/string.csv-spec[tag=to_string_multivalue] |=== include::{esql-specs}/string.csv-spec[tag=to_string_multivalue-result] |=== - -Alias: TO_STR - -Supported types: - -include::types/to_string.asciidoc[] diff --git a/docs/reference/esql/functions/to_unsigned_long.asciidoc b/docs/reference/esql/functions/to_unsigned_long.asciidoc index af3ff05bf055c..a4a6cfd54ed6f 100644 --- a/docs/reference/esql/functions/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/to_unsigned_long.asciidoc @@ -1,12 +1,38 @@ [discrete] [[esql-to_unsigned_long]] === `TO_UNSIGNED_LONG` + +*Aliases* + +`TO_ULONG`, `TO_UL` + +*Syntax* + +[source,esql] +---- +TO_UNSIGNED_LONG(v) +---- + +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + Converts an input value to an unsigned long value. -The input can be a single- or multi-valued field or an expression. The input -type must be of a boolean, date, string or numeric type. +*Supported types* -Example: +The input type must be of a boolean, date, string or numeric type. + +If the input parameter is of a date type, its value will be interpreted as +milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to unsigned +long. + +Boolean *true* will be converted to unsigned long *1*, *false* to *0*. + +*Example* [source.merge.styled,esql] ---- @@ -27,12 +53,3 @@ on the source of the failure: A following header will contain the failure reason and the offending value: `"java.lang.NumberFormatException: Character f is neither a decimal digit number, decimal point, nor \"e\" notation exponential mark."` - - -If the input parameter is of a date type, its value will be interpreted as -milliseconds since the https://en.wikipedia.org/wiki/Unix_time[Unix epoch], -converted to unsigned long. 
- -Boolean *true* will be converted to unsigned long *1*, *false* to *0*. - -Alias: TO_ULONG, TO_UL diff --git a/docs/reference/esql/functions/to_upper.asciidoc b/docs/reference/esql/functions/to_upper.asciidoc new file mode 100644 index 0000000000000..cea63bcbb4bb0 --- /dev/null +++ b/docs/reference/esql/functions/to_upper.asciidoc @@ -0,0 +1,32 @@ +[discrete] +[[esql-to_upper]] +=== `TO_UPPER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_upper.svg[Embedded,opts=inline] + +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +*Description* + +Returns a new string representing the input string converted to upper case. + +*Supported types* + +include::types/to_upper.asciidoc[] + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=to_upper] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=to_upper-result] +|=== diff --git a/docs/reference/esql/functions/to_version.asciidoc b/docs/reference/esql/functions/to_version.asciidoc index 33419233c4788..6a1583889c87f 100644 --- a/docs/reference/esql/functions/to_version.asciidoc +++ b/docs/reference/esql/functions/to_version.asciidoc @@ -1,10 +1,30 @@ [discrete] [[esql-to_version]] === `TO_VERSION` + +*Alias* + +`TO_VER` + +*Syntax* + [.text-center] image::esql/functions/signature/to_version.svg[Embedded,opts=inline] -Converts an input string to a version value. For example: +*Parameters* + +`v`:: +Input value. The input can be a single- or multi-valued column or an expression. + +*Description* + +Converts an input string to a version value. + +*Supported types* + +include::types/to_version.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,11 +34,3 @@ include::{esql-specs}/version.csv-spec[tag=to_version] |=== include::{esql-specs}/version.csv-spec[tag=to_version-result] |=== - -The input can be a single- or multi-valued field or an expression. 
- -Alias: TO_VER - -Supported types: - -include::types/to_version.asciidoc[] diff --git a/docs/reference/esql/functions/trim.asciidoc b/docs/reference/esql/functions/trim.asciidoc index 6ace6118dd757..0b246b7526cd2 100644 --- a/docs/reference/esql/functions/trim.asciidoc +++ b/docs/reference/esql/functions/trim.asciidoc @@ -1,11 +1,27 @@ [discrete] [[esql-trim]] === `TRIM` + +*Syntax* + [.text-center] image::esql/functions/signature/trim.svg[Embedded,opts=inline] +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. + +*Description* + Removes leading and trailing whitespaces from strings. +*Supported types* + +include::types/trim.asciidoc[] + +*Example* + [source.merge.styled,esql] ---- include::{esql-specs}/string.csv-spec[tag=trim] @@ -14,7 +30,3 @@ include::{esql-specs}/string.csv-spec[tag=trim] |=== include::{esql-specs}/string.csv-spec[tag=trim-result] |=== - -Supported types: - -include::types/trim.asciidoc[] diff --git a/docs/reference/esql/functions/type-conversion-functions.asciidoc b/docs/reference/esql/functions/type-conversion-functions.asciidoc index 48a9b175d3d65..611e1f7fddfb4 100644 --- a/docs/reference/esql/functions/type-conversion-functions.asciidoc +++ b/docs/reference/esql/functions/type-conversion-functions.asciidoc @@ -10,10 +10,12 @@ // tag::type_list[] * <> * <> +* <> * <> * <> * <> * <> +* <> * <> * <> * <> @@ -25,10 +27,12 @@ include::to_boolean.asciidoc[] include::to_cartesianpoint.asciidoc[] +include::to_cartesianshape.asciidoc[] include::to_datetime.asciidoc[] include::to_degrees.asciidoc[] include::to_double.asciidoc[] include::to_geopoint.asciidoc[] +include::to_geoshape.asciidoc[] include::to_integer.asciidoc[] include::to_ip.asciidoc[] include::to_long.asciidoc[] diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 7062d7000115a..3bf3d8ad3d713 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ 
b/docs/reference/esql/functions/types/case.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +condition | rest | result |=== diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index e36316ab87bb5..2daf6126d6fb0 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +expression | expressionX | result boolean | boolean | boolean integer | integer | integer keyword | keyword | keyword diff --git a/docs/reference/esql/functions/types/concat.asciidoc b/docs/reference/esql/functions/types/concat.asciidoc index f422b45f0b34c..1f14abf9c498f 100644 --- a/docs/reference/esql/functions/types/concat.asciidoc +++ b/docs/reference/esql/functions/types/concat.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +first | rest | result keyword | keyword | keyword text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index 9963c85b2af85..edd244548fb18 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +date_part | field | result keyword | datetime | long |=== diff --git a/docs/reference/esql/functions/types/ends_with.asciidoc b/docs/reference/esql/functions/types/ends_with.asciidoc index 6c406b80c0cad..88489185b41f7 100644 --- a/docs/reference/esql/functions/types/ends_with.asciidoc +++ b/docs/reference/esql/functions/types/ends_with.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +str | suffix | result keyword | 
keyword | boolean +text | text | boolean |=== diff --git a/docs/reference/esql/functions/types/left.asciidoc b/docs/reference/esql/functions/types/left.asciidoc index c30a055f3be49..6899a408969f7 100644 --- a/docs/reference/esql/functions/types/left.asciidoc +++ b/docs/reference/esql/functions/types/left.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -string | length | result +str | length | result keyword | integer | keyword +text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/length.asciidoc b/docs/reference/esql/functions/types/length.asciidoc index 9af62defcb2a9..de84fe63c794a 100644 --- a/docs/reference/esql/functions/types/length.asciidoc +++ b/docs/reference/esql/functions/types/length.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +str | result keyword | integer +text | integer |=== diff --git a/docs/reference/esql/functions/types/mul.asciidoc b/docs/reference/esql/functions/types/mul.asciidoc index eee2d68e4653f..2f5100b1d1494 100644 --- a/docs/reference/esql/functions/types/mul.asciidoc +++ b/docs/reference/esql/functions/types/mul.asciidoc @@ -4,4 +4,5 @@ lhs | rhs | result double | double | double integer | integer | integer long | long | long +unsigned_long | unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/mv_avg.asciidoc b/docs/reference/esql/functions/types/mv_avg.asciidoc index dd4f6b0725cc8..0bba9b341c301 100644 --- a/docs/reference/esql/functions/types/mv_avg.asciidoc +++ b/docs/reference/esql/functions/types/mv_avg.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +field | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index 440e66d11096e..a2e7119bab05d 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ 
b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -3,9 +3,11 @@ v | result boolean | integer cartesian_point | integer +cartesian_shape | integer datetime | integer double | integer geo_point | integer +geo_shape | integer integer | integer ip | integer keyword | integer diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index e6c67a454b96b..620c7cf13b771 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -3,9 +3,11 @@ v | result boolean | boolean cartesian_point | cartesian_point +cartesian_shape | cartesian_shape datetime | datetime double | double geo_point | geo_point +geo_shape | geo_shape integer | integer ip | ip keyword | keyword diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index e6c67a454b96b..620c7cf13b771 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -3,9 +3,11 @@ v | result boolean | boolean cartesian_point | cartesian_point +cartesian_shape | cartesian_shape datetime | datetime double | double geo_point | geo_point +geo_shape | geo_shape integer | integer ip | ip keyword | keyword diff --git a/docs/reference/esql/functions/types/mv_median.asciidoc b/docs/reference/esql/functions/types/mv_median.asciidoc index f1831429aa95c..4bb9cf6c7a1cb 100644 --- a/docs/reference/esql/functions/types/mv_median.asciidoc +++ b/docs/reference/esql/functions/types/mv_median.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/mv_sum.asciidoc b/docs/reference/esql/functions/types/mv_sum.asciidoc index 09cb78511d275..4bb9cf6c7a1cb 100644 --- a/docs/reference/esql/functions/types/mv_sum.asciidoc +++ 
b/docs/reference/esql/functions/types/mv_sum.asciidoc @@ -1,5 +1,8 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result double | double +integer | integer +long | long +unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/replace.asciidoc b/docs/reference/esql/functions/types/replace.asciidoc index 6824d1fd97128..8c2be37bd63a0 100644 --- a/docs/reference/esql/functions/types/replace.asciidoc +++ b/docs/reference/esql/functions/types/replace.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | arg3 | result +str | regex | newStr | result keyword | keyword | keyword | keyword keyword | keyword | text | keyword keyword | text | keyword | keyword diff --git a/docs/reference/esql/functions/types/right.asciidoc b/docs/reference/esql/functions/types/right.asciidoc index c30a055f3be49..6899a408969f7 100644 --- a/docs/reference/esql/functions/types/right.asciidoc +++ b/docs/reference/esql/functions/types/right.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -string | length | result +str | length | result keyword | integer | keyword +text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index 5ba9e2f776d75..33e89c91f0bfe 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +value | decimals | result double | integer | double |=== diff --git a/docs/reference/esql/functions/types/split.asciidoc b/docs/reference/esql/functions/types/split.asciidoc index f1f744dbe4126..4b5e6856c8fe2 100644 --- a/docs/reference/esql/functions/types/split.asciidoc +++ b/docs/reference/esql/functions/types/split.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | 
result +str | delim | result keyword | keyword | keyword +text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/st_centroid.asciidoc b/docs/reference/esql/functions/types/st_centroid.asciidoc new file mode 100644 index 0000000000000..cbafb9d0fa6dc --- /dev/null +++ b/docs/reference/esql/functions/types/st_centroid.asciidoc @@ -0,0 +1,6 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +geo_point | geo_point +cartesian_point | cartesian_point +|=== diff --git a/docs/reference/esql/functions/types/starts_with.asciidoc b/docs/reference/esql/functions/types/starts_with.asciidoc index 6c406b80c0cad..863ddef3c0361 100644 --- a/docs/reference/esql/functions/types/starts_with.asciidoc +++ b/docs/reference/esql/functions/types/starts_with.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +str | prefix | result keyword | keyword | boolean +text | text | boolean |=== diff --git a/docs/reference/esql/functions/types/sub.asciidoc b/docs/reference/esql/functions/types/sub.asciidoc index ed26adf06ecde..826c4f6274652 100644 --- a/docs/reference/esql/functions/types/sub.asciidoc +++ b/docs/reference/esql/functions/types/sub.asciidoc @@ -8,4 +8,5 @@ double | double | double integer | integer | integer long | long | long time_duration | time_duration | time_duration +unsigned_long | unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/substring.asciidoc b/docs/reference/esql/functions/types/substring.asciidoc index 2aa96ceeb7e43..f12a40c9253fb 100644 --- a/docs/reference/esql/functions/types/substring.asciidoc +++ b/docs/reference/esql/functions/types/substring.asciidoc @@ -1,5 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | arg3 | result +str | start | length | result keyword | integer | integer | keyword +text | integer | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc 
b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..081d879c4b713 --- /dev/null +++ b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc @@ -0,0 +1,7 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +cartesian_point | cartesian_point +keyword | cartesian_point +text | cartesian_point +|=== diff --git a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..258a31169782d --- /dev/null +++ b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +cartesian_point | cartesian_shape +cartesian_shape | cartesian_shape +keyword | cartesian_shape +text | cartesian_shape +|=== diff --git a/docs/reference/esql/functions/types/to_geopoint.asciidoc b/docs/reference/esql/functions/types/to_geopoint.asciidoc new file mode 100644 index 0000000000000..c464aec9e983c --- /dev/null +++ b/docs/reference/esql/functions/types/to_geopoint.asciidoc @@ -0,0 +1,7 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +geo_point | geo_point +keyword | geo_point +text | geo_point +|=== diff --git a/docs/reference/esql/functions/types/to_geoshape.asciidoc b/docs/reference/esql/functions/types/to_geoshape.asciidoc new file mode 100644 index 0000000000000..5fc8611ee2f92 --- /dev/null +++ b/docs/reference/esql/functions/types/to_geoshape.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +geo_point | geo_shape +geo_shape | geo_shape +keyword | geo_shape +text | geo_shape +|=== diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc index 5c063739fc5b1..307f573f1db2d 100644 --- a/docs/reference/esql/functions/types/to_long.asciidoc +++ 
b/docs/reference/esql/functions/types/to_long.asciidoc @@ -2,10 +2,8 @@ |=== v | result boolean | long -cartesian_point | long datetime | long double | long -geo_point | long integer | long keyword | long long | long diff --git a/docs/reference/esql/functions/types/to_lower.asciidoc b/docs/reference/esql/functions/types/to_lower.asciidoc new file mode 100644 index 0000000000000..26f4e7633d8ae --- /dev/null +++ b/docs/reference/esql/functions/types/to_lower.asciidoc @@ -0,0 +1,6 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +str | result +keyword | keyword +text | text +|=== diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index 4de4af735b07f..773e396f41373 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -3,9 +3,11 @@ v | result boolean | keyword cartesian_point | keyword +cartesian_shape | keyword datetime | keyword double | keyword geo_point | keyword +geo_shape | keyword integer | keyword ip | keyword keyword | keyword diff --git a/docs/reference/esql/functions/types/to_upper.asciidoc b/docs/reference/esql/functions/types/to_upper.asciidoc new file mode 100644 index 0000000000000..26f4e7633d8ae --- /dev/null +++ b/docs/reference/esql/functions/types/to_upper.asciidoc @@ -0,0 +1,6 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +str | result +keyword | keyword +text | text +|=== diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 6cb7755b91ce9..871a741d5ee24 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -201,8 +201,8 @@ POST /_query "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"}, - { "name": "b+2", "type": "long"}, - { "name": "a+b", "type": "long"} + { "name": "b + 2", "type": "long"}, + { "name": "a + b", "type": 
"long"} ], "values": [ [1, [1, 2], null, null], @@ -236,8 +236,8 @@ POST /_query "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"}, - { "name": "b+2", "type": "long"}, - { "name": "a+b", "type": "long"} + { "name": "b + 2", "type": "long"}, + { "name": "a + b", "type": "long"} ], "values": [ [1, 1, 3, 2], diff --git a/docs/reference/esql/processing-commands/where.asciidoc b/docs/reference/esql/processing-commands/where.asciidoc index 973b163b08b10..3076f92c40fc0 100644 --- a/docs/reference/esql/processing-commands/where.asciidoc +++ b/docs/reference/esql/processing-commands/where.asciidoc @@ -33,6 +33,14 @@ Which, if `still_hired` is a boolean field, can be simplified to: include::{esql-specs}/docs.csv-spec[tag=whereBoolean] ---- +Use date math to retrieve data from a specific time range. For example, to +retrieve the last hour of logs: + +[source,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- + `WHERE` supports various <>. For example the <> function: diff --git a/docs/reference/how-to/knn-search.asciidoc b/docs/reference/how-to/knn-search.asciidoc index 2e0462c193f63..15e3ff7c38e86 100644 --- a/docs/reference/how-to/knn-search.asciidoc +++ b/docs/reference/how-to/knn-search.asciidoc @@ -16,8 +16,10 @@ structures. So these same recommendations also help with indexing speed. The default <> is `float`. But this can be automatically quantized during index time through <>. Quantization will reduce the -required memory by 4x, but it will also reduce the precision of the vectors. For -`float` vectors with `dim` greater than or equal to `384`, using a +required memory by 4x, but it will also reduce the precision of the vectors and +increase disk usage for the field (by up to 25%). + +For `float` vectors with `dim` greater than or equal to `384`, using a <> index is highly recommended. 
[discrete] diff --git a/docs/reference/ilm/apis/explain.asciidoc b/docs/reference/ilm/apis/explain.asciidoc index e63a1db2f5d8a..64e9ec9d5241b 100644 --- a/docs/reference/ilm/apis/explain.asciidoc +++ b/docs/reference/ilm/apis/explain.asciidoc @@ -98,7 +98,7 @@ GET /_cluster/health?wait_for_status=green&timeout=10s [source,console] -------------------------------------------------- -GET my-index-000001/_ilm/explain +GET my-index-000001/_ilm/explain?human -------------------------------------------------- // TEST[continued] @@ -111,18 +111,23 @@ that the index is managed and in the `new` phase: "indices": { "my-index-000001": { "index": "my-index-000001", - "index_creation_date_millis": 1538475653281, <1> - "time_since_index_creation": "15s", <2> - "managed": true, <3> - "policy": "my_policy", <4> - "lifecycle_date_millis": 1538475653281, <5> - "age": "15s", <6> + "index_creation_date_millis": 1538475653281, <1> + "index_creation_date": "2018-10-15T13:45:21.981Z", + "time_since_index_creation": "15s", <2> + "managed": true, <3> + "policy": "my_policy", <4> + "lifecycle_date_millis": 1538475653281, <5> + "lifecycle_date": "2018-10-15T13:45:21.981Z", + "age": "15s", <6> "phase": "new", - "phase_time_millis": 1538475653317, <7> + "phase_time_millis": 1538475653317, <7> + "phase_time": "2018-10-15T13:45:22.577Z", "action": "complete" - "action_time_millis": 1538475653317, <8> + "action_time_millis": 1538475653317, <8> + "action_time": "2018-10-15T13:45:22.577Z", "step": "complete", - "step_time_millis": 1538475653317 <9> + "step_time_millis": 1538475653317, <9> + "step_time": "2018-10-15T13:45:22.577Z" } } } @@ -175,13 +180,15 @@ phase completes. 
"min_age": "0ms", "actions": { "rollover": { - "max_age": "30s" + "max_age": "30s", + "max_primary_shard_docs": 200000000, <2> + "min_docs": 1 } } }, - "version": 3, <2> - "modified_date": "2018-10-15T13:21:41.576Z", <3> - "modified_date_in_millis": 1539609701576 <4> + "version": 3, <3> + "modified_date": "2018-10-15T13:21:41.576Z", <4> + "modified_date_in_millis": 1539609701576 <5> } } } @@ -191,9 +198,10 @@ phase completes. <1> The JSON phase definition loaded from the specified policy when the index entered this phase -<2> The version of the policy that was loaded -<3> The date the loaded policy was last modified -<4> The epoch time when the loaded policy was last modified +<2> The rollover action includes the default `max_primary_shard_docs` and `min_docs` conditions. See <> for more information. +<3> The version of the policy that was loaded +<4> The date the loaded policy was last modified +<5> The epoch time when the loaded policy was last modified If {ilm-init} is waiting for a step to complete, the response includes status information for the step that's being performed on the index. diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 5d517d313b9ea..1d097c91bbedf 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -25,6 +25,7 @@ own model, use the <>. * Requires the `manage` <>. + [discrete] [[put-inference-api-desc]] ==== {api-description-title} @@ -33,10 +34,12 @@ The create {infer} API enables you to create and configure an {infer} model to perform a specific {infer} task. The following services are available through the {infer} API: + * ELSER * OpenAI * Hugging Face + [discrete] [[put-inference-api-path-params]] ==== {api-path-parms-title} @@ -63,7 +66,8 @@ The type of service supported for the specified task type. Available services: * `elser`: specify the `sparse_embedding` task type to use the ELSER service. 
* `openai`: specify the `text_embedding` task type to use the OpenAI service. -* `hugging_face`: specify the `text_embedding` task type to use the Hugging Face service. +* `hugging_face`: specify the `text_embedding` task type to use the Hugging Face +service. `service_settings`:: (Required, object) @@ -108,6 +112,26 @@ https://platform.openai.com/account/organization[**Settings** > **Organizations* The URL endpoint to use for the requests. Can be changed for testing purposes. Defaults to `https://api.openai.com/v1/embeddings`. ===== ++ +.`service_settings` for `hugging_face` +[%collapsible%closed] +===== +`api_key`::: +(Required, string) +A valid access token of your Hugging Face account. You can find your Hugging +Face access tokens or you can create a new one +https://huggingface.co/settings/tokens[on the settings page]. + +IMPORTANT: You need to provide the API key only once, during the {infer} model +creation. The <> does not retrieve your API key. After +creating the {infer} model, you cannot change the associated API key. If you +want to use a different API key, delete the {infer} model and recreate it with +the same name and the updated API key. + +`url`::: +(Required, string) +The URL endpoint to use for the requests. +===== `task_settings`:: (Optional, object) @@ -124,10 +148,18 @@ https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI do for the list of available text embedding models. ===== + [discrete] [[put-inference-api-example]] ==== {api-examples-title} +This section contains example API calls for every service type. + + +[discrete] +[[inference-example-elser]] +===== ELSER service + The following example shows how to create an {infer} model called `my-elser-model` to perform a `sparse_embedding` task type. 
@@ -164,6 +196,10 @@ Example response: // NOTCONSOLE +[discrete] +[[inference-example-openai]] +===== OpenAI service + The following example shows how to create an {infer} model called `openai_embeddings` to perform a `text_embedding` task type. @@ -180,4 +216,35 @@ PUT _inference/text_embedding/openai_embeddings } } ------------------------------------------------------------ -// TEST[skip:TBD] \ No newline at end of file +// TEST[skip:TBD] + + +[discrete] +[[inference-example-hugging-face]] +===== Hugging Face service + +The following example shows how to create an {infer} model called +`hugging-face-embeddings` to perform a `text_embedding` task type. + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/hugging-face-embeddings +{ + "service": "hugging_face", + "service_settings": { + "api_key": "", <1> + "url": "" <2> + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> A valid Hugging Face access token. You can find it on the +https://huggingface.co/settings/tokens[settings page of your account]. +<2> The {infer} endpoint URL you created on Hugging Face. + +Create a new {infer} endpoint on +https://ui.endpoints.huggingface.co/[the Hugging Face endpoint page] to get an +endpoint URL. Select the model you want to use on the new endpoint creation page +- for example `intfloat/e5-small-v2` - then select the `Sentence Embeddings` +task under the Advanced configuration section. Create the endpoint. Copy the URL +after the endpoint initialization has finished. \ No newline at end of file diff --git a/docs/reference/ingest/processors.asciidoc b/docs/reference/ingest/processors.asciidoc index 4132773e3d427..8622e0b98602c 100644 --- a/docs/reference/ingest/processors.asciidoc +++ b/docs/reference/ingest/processors.asciidoc @@ -4,7 +4,15 @@ Processor reference ++++ -{es} includes several configurable processors.
To get a list of available +An <> is made up of a sequence of processors that are applied to documents as they are ingested into an index. +Each processor performs a specific task, such as filtering, transforming, or enriching data. + +Each successive processor depends on the output of the previous processor, so the order of processors is important. +The modified documents are indexed into {es} after all processors are applied. + +{es} includes over 40 configurable processors. +The subpages in this section contain reference documentation for each processor. +To get a list of available processors, use the <> API. [source,console] @@ -12,11 +20,191 @@ processors, use the <> API. GET _nodes/ingest?filter_path=nodes.*.ingest.processors ---- -The pages in this section contain reference documentation for each processor. +[discrete] +[[ingest-processors-categories]] +=== Ingest processors by category + +We've categorized the available processors on this page and summarized their functions. +This will help you find the right processor for your use case. + +* <> +* <> +* <> +* <> +* <> + +[discrete] +[[ingest-process-category-data-enrichment]] +=== Data enrichment processors + +[discrete] +[[ingest-process-category-data-enrichment-general]] +==== General outcomes + +<>:: +Appends a value to a field. + +<>:: +Points documents to the right time-based index based on a date or timestamp field. + +<>:: +Enriches documents with data from another index. +[TIP] +==== +Refer to <> for detailed examples of how to use the `enrich` processor to add data from your existing indices to incoming documents during ingest. +==== + +<>:: +Uses {ml} to classify and tag text fields. + +[discrete] +[[ingest-process-category-data-enrichment-specific]] +==== Specific outcomes + +<>:: +Parses and indexes binary data, such as PDFs and Word documents. + +<>:: +Converts a location field to a Geo-Point field. + +<>:: +Computes the Community ID for network flow data. 
+ +<>:: +Computes a hash of the document’s content. + +<>:: +Converts geo-grid definitions of grid tiles or cells to regular bounding boxes or polygons which describe their shape. + +<>:: +Adds information about the geographical location of an IPv4 or IPv6 address. + +<>:: +Calculates the network direction given a source IP address, destination IP address, and a list of internal networks. + +<>:: +Extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). + +<>:: +Sets user-related details (such as `username`, `roles`, `email`, `full_name`,`metadata`, `api_key`, `realm` and `authentication_type`) from the current authenticated user to the current document by pre-processing the ingest. + +<>:: +Parses a Uniform Resource Identifier (URI) string and extracts its components as an object. + +<>:: +URL-decodes a string. + +<>:: +Parses user-agent strings to extract information about web clients. + +[discrete] +[[ingest-process-category-data-transformation]] +=== Data transformation processors + +[discrete] +[[ingest-process-category-data-transformation-general]] +==== General outcomes + +<>:: +Converts a field in the currently ingested document to a different type, such as converting a string to an integer. + +<>:: +Extracts structured fields out of a single text field within a document. +Unlike the <>, dissect does not use regular expressions. +This makes dissect a simpler and often faster alternative. + +<>:: +Extracts structured fields out of a single text field within a document, using the <> regular expression dialect that supports reusable aliased expressions. + +<>:: +Converts a string field by applying a regular expression and a replacement. + +<>:: +Uses the <> rules engine to obscure text in the input document matching the given Grok patterns. + +<>:: +Renames an existing field. + +<>:: +Sets a value on a field.
+ +[discrete] +[[ingest-process-category-data-transformation-specific]] +==== Specific outcomes + +<>:: +Converts a human-readable byte value to its value in bytes (for example `1kb` becomes `1024`). + +<>:: +Extracts a single line of CSV data from a text field. + +<>:: +Extracts and converts date fields. + +<> processor:: +Expands a field with dots into an object field. + +<>:: +Removes HTML tags from a field. + +<>:: +Joins each element of an array into a single string using a separator character between each element. + +<>:: +Parses messages (or specific event fields) containing key-value pairs. + +<> and <>:: +Converts a string field to lowercase or uppercase. + +<>:: +Splits a field into an array of values. + +<>:: +Trims whitespace from a field. + +[discrete] +[[ingest-process-category-data-filtering]] +=== Data filtering processors + +<>:: +Drops the document without raising any errors. + +<>:: +Removes fields from documents. + +[discrete] +[[ingest-process-category-pipeline-handling]] +=== Pipeline handling processors + +<>:: +Raises an exception. Useful for when you expect a pipeline to fail and want to relay a specific message to the requester. + +<>:: +Executes another pipeline. + +<>:: +Reroutes documents to another target index or data stream. + +[discrete] +[[ingest-process-category-array-json-handling]] +=== Array/JSON handling processors + +<>:: +Runs an ingest processor on each element of an array or object. + +<>:: +Converts a JSON string into a structured JSON object. + +<>:: +Runs an inline or stored <> on incoming documents. +The script runs in the {painless}/painless-ingest-processor-context.html[painless `ingest` context]. + +<>:: +Sorts the elements of an array in ascending or descending order. [discrete] [[ingest-process-plugins]] -=== Processor plugins +=== Add additional processors You can install additional processors as {plugins}/ingest.html[plugins].
diff --git a/docs/reference/ingest/processors/rename.asciidoc b/docs/reference/ingest/processors/rename.asciidoc index 9b0eeaa157d55..82b97f48519c9 100644 --- a/docs/reference/ingest/processors/rename.asciidoc +++ b/docs/reference/ingest/processors/rename.asciidoc @@ -13,7 +13,8 @@ Renames an existing field. If the field doesn't exist or the new name is already | Name | Required | Default | Description | `field` | yes | - | The field to be renamed. Supports <>. | `target_field` | yes | - | The new name of the field. Supports <>. -| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document +| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document. +| `override` | no | `false` | If `true`, the processor will update pre-existing non-null-valued fields. When set to `false`, such fields will not be touched. include::common-options.asciidoc[] |====== diff --git a/docs/reference/migration/migrate_8_12.asciidoc b/docs/reference/migration/migrate_8_12.asciidoc index d241a35c686d7..c7f4aa8728693 100644 --- a/docs/reference/migration/migrate_8_12.asciidoc +++ b/docs/reference/migration/migrate_8_12.asciidoc @@ -9,12 +9,66 @@ your application to {es} 8.12. See also <> and <>. -coming::[8.12.0] - - [discrete] [[breaking-changes-8.12]] === Breaking changes -There are no breaking changes in {es} 8.12. +There are no breaking changes in 8.12. + +[discrete] +[[notable-changes-8.12]] +=== Notable changes + +There are notable changes in 8.12 that you need to be aware of. Items that we may consider as notable changes are: + +* Changes to features that are in Technical Preview. +* Changes to log formats. +* Changes to non-public APIs. +* Behaviour changes that repair critical bugs.
+ + +[discrete] +[[breaking_812_authorization_changes]] +==== Authorization changes + +[[fixed_jwt_principal_from_claims]] +.Fixed JWT principal from claims +[%collapsible] +==== +*Details* + +This changes the format of a JWT's principal before the JWT is actually validated by any JWT realm. The JWT's principal is a convenient way to refer to a JWT that has not yet been verified by a JWT realm. The JWT's principal is printed in the audit and regular logs (notably for auditing authn failures) as well as the smart realm chain reordering optimization. The JWT principal is NOT required to be identical to the JWT-authenticated user's principal, but in general, they should be similar. Previously, the JWT's principal was built by individual realms in the same way the realms built the authenticated user's principal. This had the advantage that, in simpler JWT realms configurations (e.g. a single JWT realm in the chain), the JWT principal and the authenticated user's principal are very similar. However the drawback is that, in general, the JWT principal and the user principal can be very different (i.e. in the case where one JWT realm builds the JWT principal and a different one builds the user principal). Another downside is that the (unauthenticated) JWT principal depended on realm ordering, which makes identifying the JWT from its principal dependent on the ES authn realm configuration. This PR implements a consistent fixed logic to build the JWT principal, which only depends on the JWT's claims and no ES configuration. + +*Impact* + +Users will observe changed format and values for the `user.name` attribute of `authentication_failed` audit log events, in the JWT (failed) authn case. 
+==== + +[discrete] +[[breaking_812_java_api_changes]] +==== Java API changes + +[[plugin_createcomponents_method_has_been_refactored_to_take_single_pluginservices_object]] +.Plugin.createComponents method has been refactored to take a single PluginServices object +[%collapsible] +==== +*Details* + +Plugin.createComponents currently takes several different service arguments. The signature of this method changes every time a new service is added. The method has now been modified to take a single interface object that new services are added to. This will reduce API incompatibility issues when a new service is introduced in the future. + +*Impact* + +Plugins that override createComponents will need to be refactored to override the new method on ES 8.12+ +==== + +[discrete] +[[breaking_812_rest_api_changes]] +==== REST API changes + +[[es_ql_pow_function_always_returns_double]] +.[ES|QL] pow function always returns double +[%collapsible] +==== +*Details* + +This corrects an earlier mistake in the ES|QL language design. Initially we had thought to have pow return the same type as its inputs, but in practice even for integer inputs this quickly grows out of the representable range, and we returned null much of the time. This also created a lot of edge cases around casting to/from doubles (which the underlying java function uses). The version in this PR follows the java spec, by always casting its inputs to doubles, and returning a double. Doing it this way also allows for a rather significant reduction in lines of code. + +*Impact* + +Low. Most queries should continue to function with the change.
+==== diff --git a/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc b/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc index 5d72b4682d4ea..f55ab207a2689 100644 --- a/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc @@ -52,6 +52,22 @@ for the periods where these delays occur: [role="screenshot"] image::images/ml-annotations.png["Delayed data annotations in the Single Metric Viewer"] +[IMPORTANT] +==== +As the `doc_count` from an aggregation is compared with the +bucket results of the job, the delayed data check will not work correctly in the +following cases: + +* if the datafeed uses aggregations and the job's `analysis_config` does not have its +`summary_count_field_name` set to `doc_count`, +* if the datafeed is _not_ using aggregations and `summary_count_field_name` is set to +any value. + +If the datafeed is using aggregations then it's highly likely that the job's +`summary_count_field_name` should be set to `doc_count`. If +`summary_count_field_name` is set to any value other than `doc_count`, the +delayed data check for the datafeed must be disabled. +==== There is another tool for visualizing the delayed data on the *Annotations* tab in the {anomaly-detect} job management page: diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index 27266d5cea659..cc6d9037bd59a 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -430,16 +430,17 @@ end::daily-model-snapshot-retention-after-days[] tag::data-description[] The data description defines the format of the input data when you send data to -the job by using the <> API. Note that when configure -a {dfeed}, these properties are automatically set. When data is received via -the <> API, it is not stored in {es}. Only the results -for {anomaly-detect} are retained. +the job by using the <> API. 
Note that when using a +{dfeed}, only the `time_field` needs to be set, the rest of the properties are +automatically set. When data is received via the <> API, +it is not stored in {es}. Only the results for {anomaly-detect} are retained. + .Properties of `data_description` [%collapsible%open] ==== `format`::: - (string) Only `JSON` format is supported at this time. + (string) Only `xcontent` format is supported at this time, and this is the + default value. `time_field`::: (string) The name of the field that contains the timestamp. diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index 45517b99c2177..27555070e9c5a 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -709,10 +709,12 @@ be applied to the input text before the input is evaluated. The prefix may be different depending on the intention. For asymmetric tasks such as infromation retrieval the prefix applied to a passage as it is indexed can be different to the prefix applied when searching those passages. - ++ +-- `prefix_strings` has 2 options, a prefix string that is always applied in the search context and one that is always applied when ingesting the docs. Both are optional. +-- + .Properties of `prefix_strings` [%collapsible%open] @@ -725,7 +727,8 @@ originating from a search query. `ingest`::: (Optional, string) The prefix string to prepend to the input text for requests -at ingest where the {infer} ingest processor is used. // TODO is there a shortcut for Inference ingest processor? +at ingest where the {infer} ingest processor is used. +// TODO is there a shortcut for Inference ingest processor? 
==== //End prefix_strings diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index 6d009dfd12261..d447026fae293 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -75,27 +75,32 @@ enable forced awareness. [[forced-awareness]] ===== Forced awareness -By default, if one location fails, Elasticsearch assigns all of the missing -replica shards to the remaining locations. While you might have sufficient -resources across all locations to host your primary and replica shards, a single -location might be unable to host *ALL* of the shards. +By default, if one location fails, {es} spreads its shards across the remaining +locations. This might be undesirable if the cluster does not have sufficient +resources to host all its shards when one location is missing. -To prevent a single location from being overloaded in the event of a failure, -you can set `cluster.routing.allocation.awareness.force` so no replicas are -allocated until nodes are available in another location. +To prevent the remaining locations from being overloaded in the event of a +whole-location failure, specify the attribute values that should exist with the +`cluster.routing.allocation.awareness.force.*` settings. This will mean that +{es} will prefer to leave some replicas unassigned in the event of a +whole-location failure instead of overloading the nodes in the remaining +locations. 
-For example, if you have an awareness attribute called `zone` and configure nodes -in `zone1` and `zone2`, you can use forced awareness to prevent Elasticsearch -from allocating replicas if only one zone is available: +For example, if you have an awareness attribute called `zone` and configure +nodes in `zone1` and `zone2`, you can use forced awareness to make {es} leave +half of your shard copies unassigned if only one zone is available: [source,yaml] ------------------------------------------------------------------- cluster.routing.allocation.awareness.attributes: zone cluster.routing.allocation.awareness.force.zone.values: zone1,zone2 <1> ------------------------------------------------------------------- -<1> Specify all possible values for the awareness attribute. - -With this example configuration, if you start two nodes with `node.attr.zone` set -to `zone1` and create an index with 5 shards and 1 replica, Elasticsearch creates -the index and allocates the 5 primary shards but no replicas. Replicas are -only allocated once nodes with `node.attr.zone` set to `zone2` are available. +<1> Specify all possible `zone` attribute values. + +With this example configuration, if you have two nodes with `node.attr.zone` +set to `zone1` and an index with `number_of_replicas` set to `1`, {es} +allocates all the primary shards but none of the replicas. It will assign the +replica shards once nodes with a different value for `node.attr.zone` join the +cluster. In contrast, if you do not configure forced awareness, {es} will +allocate all primaries and replicas to the two nodes even though they are in +the same zone. diff --git a/docs/reference/modules/cluster/disk_allocator.asciidoc b/docs/reference/modules/cluster/disk_allocator.asciidoc index e0b3b7cb0008c..02cc48c6e27fc 100644 --- a/docs/reference/modules/cluster/disk_allocator.asciidoc +++ b/docs/reference/modules/cluster/disk_allocator.asciidoc @@ -46,18 +46,12 @@ resolve persistent watermark errors. 
==== It is normal for the nodes in your cluster to be using very different amounts of disk space. The <> of the cluster -depends only on the number of shards on each node and the indices to which -those shards belong. It considers neither the sizes of these shards nor the -available disk space on each node, for the following reasons: - -* Disk usage changes over time. Balancing the disk usage of individual nodes -would require a lot more shard movements, perhaps even wastefully undoing -earlier movements. Moving a shard consumes resources such as I/O and network -bandwidth and may evict data from the filesystem cache. These resources are -better spent handling your searches and indexing where possible. - -* A cluster with equal disk usage on every node typically performs no better -than one that has unequal disk usage, as long as no disk is too full. +depends on a combination of factors which includes the number of shards on each +node, the indices to which those shards belong, and the resource needs of each +shard in terms of its size on disk and its CPU usage. {es} must trade off all +of these factors against each other, and a cluster which is balanced when +looking at the combination of all of these factors may not appear to be +balanced if you focus attention on just one of them. 
==== You can use the following settings to control disk-based allocation: diff --git a/docs/reference/query-dsl/knn-query.asciidoc b/docs/reference/query-dsl/knn-query.asciidoc index f9cc31748ef71..1f4297cb4e089 100644 --- a/docs/reference/query-dsl/knn-query.asciidoc +++ b/docs/reference/query-dsl/knn-query.asciidoc @@ -27,6 +27,9 @@ PUT my-image-index }, "file-type": { "type": "keyword" + }, + "title": { + "type": "text" } } } @@ -39,11 +42,11 @@ PUT my-image-index ---- POST my-image-index/_bulk?refresh=true { "index": { "_id": "1" } } -{ "image-vector": [1, 5, -20], "file-type": "jpg" } +{ "image-vector": [1, 5, -20], "file-type": "jpg", "title": "mountain lake" } { "index": { "_id": "2" } } -{ "image-vector": [42, 8, -15], "file-type": "png" } +{ "image-vector": [42, 8, -15], "file-type": "png", "title": "frozen lake"} { "index": { "_id": "3" } } -{ "image-vector": [15, 11, 23], "file-type": "jpg" } +{ "image-vector": [15, 11, 23], "file-type": "jpg", "title": "mountain lake lodge" } ---- //TEST[continued] @@ -176,6 +179,47 @@ POST my-image-index/_search ---- //TEST[continued] +[[knn-query-in-hybrid-search]] +==== Hybrid search with knn query +Knn query can be used as a part of hybrid search, where knn query is combined +with other lexical queries. For example, the query below finds documents with +`title` matching `mountain lake`, and combines them with the top 10 documents +that have the closest image vectors to the `query_vector`. The combined documents +are then scored and the top 3 top scored documents are returned. 
+ ++ +[source,console] +---- +POST my-image-index/_search +{ + "size" : 3, + "query": { + "bool": { + "should": [ + { + "match": { + "title": { + "query": "mountain lake", + "boost": 1 + } + } + }, + { + "knn": { + "field": "image-vector", + "query_vector": [-5, 9, -12], + "num_candidates": 10, + "boost": 2 + } + } + ] + } + } +} +---- +//TEST[continued] + + [[knn-query-with-nested-query]] ==== Knn query inside a nested query @@ -219,4 +263,3 @@ Thus, the final results from aggregations contain `num_candidates * number_of_shards` documents. This is different from the <> where aggregations are calculated on the global top k nearest documents. - diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 068cb3d2f127b..1aebf005a64e3 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -8,6 +8,7 @@ This section summarizes the changes in each release. * <> * <> +* <> * <> * <> * <> @@ -60,6 +61,7 @@ This section summarizes the changes in each release. include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.0.asciidoc[] +include::release-notes/8.11.4.asciidoc[] include::release-notes/8.11.3.asciidoc[] include::release-notes/8.11.2.asciidoc[] include::release-notes/8.11.1.asciidoc[] diff --git a/docs/reference/release-notes/8.11.4.asciidoc b/docs/reference/release-notes/8.11.4.asciidoc new file mode 100644 index 0000000000000..0fd57c97b1a89 --- /dev/null +++ b/docs/reference/release-notes/8.11.4.asciidoc @@ -0,0 +1,31 @@ +[[release-notes-8.11.4]] +== {es} version 8.11.4 + +Also see <>. 
+ +[[bug-8.11.4]] +[float] +=== Bug fixes + +EQL:: +* Fix NPE on missing event queries {es-pull}103611[#103611] (issue: {es-issue}103608[#103608]) + +ES|QL:: +* Fix now in millis for ESQL search contexts {es-pull}103474[#103474] (issue: {es-issue}103455[#103455]) +* Fix the transport version of `PlanStreamOutput` {es-pull}103758[#103758] +* `AsyncOperator#isFinished` must never return true on failure {es-pull}104029[#104029] + +Infra/Scripting:: +* Wrap painless explain error {es-pull}103151[#103151] (issue: {es-issue}103018[#103018]) + +Mapping:: +* Revert change {es-pull}103865[#103865] + +Snapshot/Restore:: +* Decref `SharedBytes.IO` after read is done not before {es-pull}102848[#102848] +* Restore `SharedBytes.IO` refcounting on reads & writes {es-pull}102843[#102843] + +Watcher:: +* Fix: Watcher REST API `GET /_watcher/settings` now includes product header {es-pull}103003[#103003] (issue: {es-issue}102928[#102928]) + + diff --git a/docs/reference/release-notes/8.12.0.asciidoc b/docs/reference/release-notes/8.12.0.asciidoc index 6355b7c5135db..4c0fc50584b9f 100644 --- a/docs/reference/release-notes/8.12.0.asciidoc +++ b/docs/reference/release-notes/8.12.0.asciidoc @@ -1,8 +1,428 @@ [[release-notes-8.12.0]] == {es} version 8.12.0 -coming[8.12.0] - Also see <>. +[[known-issues-8.12.0]] +[float] +=== Known issues + +* `int8_hnsw` vector index format may fail to merge segments and prevent from indexing documents (issue: {es-issue}104617[#104617]) ++ +When using `int8_hnsw` and the default `confidence_interval` (or any `confidence_interval` less than `1.0`) and when +there are deleted documents in the segments, quantiles may fail to build and prevent merging. + +This issue is fixed in 8.12.1. 
+ +[[breaking-8.12.0]] +[float] +=== Breaking changes +There are no breaking changes in 8.12 + +[[notable-8.12.0]] +[float] +=== Notable changes +There are notable changes in 8.12 that you need to be aware of but that we do not consider breaking, items that we may consider as notable changes are + +* Changes to features that are in Technical Preview. +* Changes to log formats. +* Changes to non-public APIs. +* Behaviour changes that repair critical bugs. + +Authorization:: +* Fixed JWT principal from claims {es-pull}101333[#101333] + +ES|QL:: +* [ES|QL] pow function always returns double {es-pull}102183[#102183] (issue: {es-issue}99055[#99055]) + +Infra/Plugins:: +* Remove Plugin.createComponents method in favour of overload with a PluginServices object {es-pull}101457[#101457] + +[[bug-8.12.0]] +[float] +=== Bug fixes + +Aggregations:: +* Adjust Histogram's bucket accounting to be iteratively {es-pull}102172[#102172] +* Aggs error codes part 1 {es-pull}99963[#99963] +* Skip global ordinals loading if query does not match after rewrite {es-pull}102844[#102844] +* Trigger parent circuit breaker when building scorers in filters aggregation {es-pull}102511[#102511] +* Unwrap `ExecutionException` when loading from cache in `AbstractIndexOrdinalsFieldData` {es-pull}102476[#102476] + +Application:: +* [Connector API] Fix bug with nullable tooltip field in parser {es-pull}103427[#103427] +* [Connectors API] Fix `ClassCastException` when creating a new sync job {es-pull}103508[#103508] +* [Connectors API] Fix bug with missing TEXT `DisplayType` enum {es-pull}103430[#103430] +* [Connectors API] Handle nullable fields correctly in the `ConnectorSyncJob` parser {es-pull}103183[#103183] +* [Profiling] Query in parallel only if beneficial {es-pull}103061[#103061] +* [Search Applications] Return 400 response when template rendering produces invalid JSON {es-pull}101474[#101474] + +Authentication:: +* Fall through malformed JWTs to subsequent realms in the chain 
{es-pull}101660[#101660] (issue: {es-issue}101367[#101367]) + +Authorization:: +* Fix cache invalidation on privilege modification {es-pull}102193[#102193] + +Data streams:: +* Use dataset size instead of on-disk size for data stream stats {es-pull}103342[#103342] + +Distributed:: +* Active shards message corrected for search shards {es-pull}102808[#102808] (issue: {es-issue}101896[#101896]) +* Dispatch `ClusterStateAction#buildResponse` to executor {es-pull}103435[#103435] +* Fix listeners in `SharedBlobCacheService.readMultiRegions` {es-pull}101727[#101727] + +Downsampling:: +* Copy counter field properties to downsampled index {es-pull}103580[#103580] (issue: {es-issue}103569[#103569]) +* Fix downsample api by returning a failure in case one or more downsample persistent tasks failed {es-pull}103615[#103615] + +EQL:: +* Cover head/tail commands edge cases and data types coverage {es-pull}101859[#101859] (issue: {es-issue}101724[#101724]) +* Fix NPE on missing event queries {es-pull}103611[#103611] (issue: {es-issue}103608[#103608]) +* Samples should check if the aggregations result is empty or null {es-pull}103574[#103574] + +ES|QL:: +* ESQL: Fix `to_degrees()` returning infinity {es-pull}103209[#103209] (issue: {es-issue}102987[#102987]) +* ESQL: Fix planning of MV_EXPAND with foldable expressions {es-pull}101385[#101385] (issue: {es-issue}101118[#101118]) +* ESQL: Fix rare bug with empty string {es-pull}102350[#102350] (issue: {es-issue}101969[#101969]) +* ESQL: Fix resolution of MV_EXPAND after KEEP * {es-pull}103339[#103339] (issue: {es-issue}103331[#103331]) +* ESQL: Fix single value query {es-pull}102317[#102317] (issue: {es-issue}102298[#102298]) +* ESQL: Improve local folding of aggregates {es-pull}103670[#103670] +* ESQL: Improve pushdown of certain filters {es-pull}103671[#103671] +* ESQL: Narrow catch in convert functions {es-pull}101788[#101788] (issue: {es-issue}100820[#100820]) +* ESQL: Update the use of some user-caused exceptions 
{es-pull}104046[#104046] +* ESQL: remove `time_zone` request parameter {es-pull}102767[#102767] (issue: {es-issue}102159[#102159]) +* ES|QL: Fix NPE on single value detection {es-pull}103150[#103150] (issue: {es-issue}103141[#103141]) +* ES|QL: Improve resolution error management in `mv_expand` {es-pull}102967[#102967] (issue: {es-issue}102964[#102964]) +* Fix layout for MV_EXPAND {es-pull}102916[#102916] (issue: {es-issue}102912[#102912]) +* Fix now in millis for ESQL search contexts {es-pull}103474[#103474] (issue: {es-issue}103455[#103455]) +* Fix planning of duplicate aggs {es-pull}102165[#102165] (issue: {es-issue}102083[#102083]) +* Fix the transport version of `PlanStreamOutput` {es-pull}103758[#103758] +* `AsyncOperator#isFinished` must never return true on failure {es-pull}104029[#104029] + +Engine:: +* Fix `lastUnsafeSegmentGenerationForGets` for realtime get {es-pull}101700[#101700] + +Geo:: +* Fix geo tile bounding boxes to be consistent with arithmetic method {es-pull}100826[#100826] (issues: {es-issue}92611[#92611], {es-issue}95574[#95574]) + +ILM+SLM:: +* Collect data tiers usage stats more efficiently {es-pull}102140[#102140] (issue: {es-issue}100230[#100230]) + +Indices APIs:: +* Fix template simulate setting application ordering {es-pull}103024[#103024] (issue: {es-issue}103008[#103008]) + +Infra/Core:: +* Cache component versions {es-pull}103408[#103408] (issue: {es-issue}102103[#102103]) +* Fix metric gauge creation model {es-pull}100609[#100609] + +Infra/Node Lifecycle:: +* Wait for reroute before acking put-shutdown {es-pull}103251[#103251] + +Infra/Plugins:: +* Making classname optional in Transport protocol {es-pull}99702[#99702] (issue: {es-issue}98584[#98584]) + +Infra/Scripting:: +* Make IPAddress writeable {es-pull}101093[#101093] (issue: {es-issue}101082[#101082]) +* Wrap painless explain error {es-pull}103151[#103151] (issue: {es-issue}103018[#103018]) + +Infra/Settings:: +* Report full stack trace for non-state file settings 
transforms {es-pull}101346[#101346] + +Ingest Node:: +* Sending an index name to `DocumentParsingObserver` that is not ever null {es-pull}100862[#100862] + +License:: +* Error log when license verification fails locally {es-pull}102919[#102919] + +Machine Learning:: +* Catch exceptions during `pytorch_inference` startup {es-pull}103873[#103873] +* Exclude quantiles when fetching model snapshots where possible {es-pull}103530[#103530] +* Fix `frequent_item_sets` aggregation on empty index {es-pull}103116[#103116] (issue: {es-issue}103067[#103067]) +* If trained model download task is in progress, wait for it to finish before executing start trained model deployment {es-pull}102944[#102944] +* Persist data counts on job close before results index refresh {es-pull}101147[#101147] +* Preserve response headers in Datafeed preview {es-pull}103923[#103923] +* Prevent attempts to access non-existent node information during rebalancing {es-pull}103361[#103361] +* Prevent resource over-subscription in model allocation planner {es-pull}100392[#100392] +* Start a new trace context before loading a trained model {es-pull}103124[#103124] +* Wait for the model results on graceful shutdown {es-pull}103591[#103591] (issue: {es-issue}103414[#103414]) + +Mapping:: +* Revert change {es-pull}103865[#103865] + +Monitoring:: +* [Monitoring] Dont get cluster state until recovery {es-pull}100565[#100565] + +Network:: +* Ensure the correct `threadContext` for `RemoteClusterNodesAction` {es-pull}101050[#101050] + +Ranking:: +* Add an additional tiebreaker to RRF {es-pull}101847[#101847] (issue: {es-issue}101232[#101232]) + +Reindex:: +* Allow prefix index naming while reindexing from remote {es-pull}96968[#96968] (issue: {es-issue}89120[#89120]) + +Search:: +* Add JIT compiler excludes for `computeCommonPrefixLengthAndBuildHistogram` {es-pull}103112[#103112] +* Check that scripts produce correct json in render template action {es-pull}101518[#101518] (issue: {es-issue}101477[#101477]) +* Fix 
NPE & empty result handling in `CountOnlyQueryPhaseResultConsumer` {es-pull}103203[#103203] +* Fix format string in `OldLuceneVersions` {es-pull}103185[#103185] +* Handle timeout on standalone rewrite calls {es-pull}103546[#103546] +* Introduce Elasticsearch `PostingFormat` based on Lucene 90 posting format using PFOR {es-pull}103601[#103601] (issue: {es-issue}103002[#103002]) +* Restore inter-segment search concurrency when synthetic source is enabled {es-pull}103690[#103690] +* Support complex datemath expressions in index and index alias names {es-pull}100646[#100646] + +Snapshot/Restore:: +* Decref `SharedBytes.IO` after read is done not before {es-pull}102848[#102848] +* More consistent logging messages for snapshot deletion {es-pull}101024[#101024] +* Reroute on shard snapshot completion {es-pull}101585[#101585] (issue: {es-issue}101514[#101514]) +* Restore `SharedBytes.IO` refcounting on reads & writes {es-pull}102843[#102843] + +TSDB:: +* Throw when wrapping rate agg in `DeferableBucketAggregator` {es-pull}101032[#101032] + +Transform:: +* Add an assertion to the testTransformFeatureReset test case {es-pull}100287[#100287] +* Consider search context missing exceptions as recoverable {es-pull}102602[#102602] +* Consider task cancelled exceptions as recoverable {es-pull}100828[#100828] +* Fix NPE that is thrown by `_update` API {es-pull}104051[#104051] (issue: {es-issue}104048[#104048]) +* Log stacktrace together with log message in order to help debugging {es-pull}101607[#101607] +* Split comma-separated source index strings into separate indices {es-pull}102811[#102811] (issue: {es-issue}99564[#99564]) + +Vector Search:: +* Disallow vectors whose magnitudes will not fit in a float {es-pull}100519[#100519] + +Watcher:: +* Correctly logging watcher history write failures {es-pull}101802[#101802] +* Fix: Watcher REST API `GET /_watcher/settings` now includes product header {es-pull}103003[#103003] (issue: {es-issue}102928[#102928]) + +[[enhancement-8.12.0]] 
+[float] +=== Enhancements + +Aggregations:: +* Check the real memory circuit breaker when building global ordinals {es-pull}102462[#102462] +* Disable concurrency for sampler and diversified sampler {es-pull}102832[#102832] +* Disable parallelism for composite agg against high cardinality fields {es-pull}102644[#102644] +* Enable concurrency for multi terms agg {es-pull}102710[#102710] +* Enable concurrency for scripted metric agg {es-pull}102461[#102461] +* Enable inter-segment concurrency for terms aggs {es-pull}101390[#101390] +* Export circuit breaker trip count as a counter metric {es-pull}101423[#101423] +* Introduce fielddata cache ttl {es-pull}102682[#102682] +* Status codes for Aggregation errors, part 2 {es-pull}100368[#100368] +* Support keyed histograms {es-pull}101826[#101826] (issue: {es-issue}100242[#100242]) + +Allocation:: +* Add more desired balance stats {es-pull}102065[#102065] +* Add undesired shard count {es-pull}101426[#101426] +* Expose reconciliation metrics via APM {es-pull}102244[#102244] + +Application:: +* Calculate CO2 and emission and costs {es-pull}101979[#101979] +* Consider duplicate stacktraces in custom index {es-pull}102292[#102292] +* Enable Universal Profiling as Enterprise feature {es-pull}100333[#100333] +* Include totals in flamegraph response {es-pull}101126[#101126] +* Retrieve stacktrace events from a custom index {es-pull}102020[#102020] +* [Profiling] Notify early about task cancellation {es-pull}102740[#102740] +* [Profiling] Report in status API if docs exist {es-pull}102735[#102735] + +Authentication:: +* Add ldap user metadata mappings for full name and email {es-pull}102925[#102925] +* Add manage_enrich cluster privilege to kibana_system role {es-pull}101682[#101682] + +Authorization:: +* Remove `auto_configure` privilege for profiling {es-pull}101026[#101026] +* Use `BulkRequest` to store Application Privileges {es-pull}102056[#102056] +* Use non-deprecated SAML callback URL in SAML smoketests 
{es-pull}99983[#99983] (issue: {es-issue}99986[#99986]) +* Use non-deprecated SAML callback URL in tests {es-pull}99983[#99983] (issue: {es-issue}99985[#99985]) + +CAT APIs:: +* Expose roles by default in cat allocation API {es-pull}101753[#101753] + +CRUD:: +* Cache resolved index for mgets {es-pull}101311[#101311] + +Data streams:: +* Introduce new endpoint to expose data stream lifecycle stats {es-pull}101845[#101845] +* Switch logs data streams to search all fields by default {es-pull}102456[#102456] (issue: {es-issue}99872[#99872]) + +Distributed:: +* Add support for configuring proxy scheme in S3 client settings and EC2 discovery plugin {es-pull}102495[#102495] (issue: {es-issue}101873[#101873]) +* Introduce a `StreamOutput` that counts how many bytes are written to the stream {es-pull}102906[#102906] +* Push s3 requests count via metrics API {es-pull}100383[#100383] +* Record operation purpose for s3 stats collection {es-pull}100236[#100236] + +EQL:: +* Add error logging for *QL {es-pull}101057[#101057] +* Use the eql query filter for the open-pit request {es-pull}103212[#103212] + +ES|QL:: +* ESQL: Add `profile` option {es-pull}102713[#102713] +* ESQL: Alias duplicated aggregations in a stats {es-pull}100642[#100642] (issue: {es-issue}100544[#100544]) +* ESQL: Load more than one field at once {es-pull}102192[#102192] +* ESQL: Load stored fields sequentially {es-pull}102727[#102727] +* ESQL: Load text field from parent keyword field {es-pull}102490[#102490] (issue: {es-issue}102473[#102473]) +* ESQL: Make blocks ref counted {es-pull}100408[#100408] +* ESQL: Make fieldcaps calls lighter {es-pull}102510[#102510] (issues: {es-issue}101763[#101763], {es-issue}102393[#102393]) +* ESQL: More tracking in `BlockHash` impls {es-pull}101488[#101488] +* ESQL: New telemetry commands {es-pull}102937[#102937] +* ESQL: Share constant null Blocks {es-pull}102673[#102673] +* ESQL: Short circuit loading empty doc values {es-pull}102434[#102434] +* ESQL: Support the `_source` 
metadata field {es-pull}102391[#102391] +* ESQL: Track blocks emitted from lucene {es-pull}101396[#101396] +* ESQL: Track memory from values loaded from lucene {es-pull}101383[#101383] +* Fast path for reading single doc with ordinals {es-pull}102902[#102902] +* Introduce local block factory {es-pull}102901[#102901] +* Load different way {es-pull}101235[#101235] +* Track ESQL enrich memory {es-pull}102184[#102184] +* Track blocks in `AsyncOperator` {es-pull}102188[#102188] +* Track blocks of intermediate state of aggs {es-pull}102562[#102562] +* Track blocks when hashing single multi-valued field {es-pull}102612[#102612] +* Track pages in ESQL enrich request/response {es-pull}102190[#102190] + +Engine:: +* Add static node settings to set default values for max merged segment sizes {es-pull}102208[#102208] + +Geo:: +* Add runtime field of type `geo_shape` {es-pull}100492[#100492] (issue: {es-issue}61299[#61299]) + +Health:: +* Add message field to `HealthPeriodicLogger` and `S3RequestRetryStats` {es-pull}101989[#101989] +* Add non-green indicator names to `HealthPeriodicLogger` message {es-pull}102245[#102245] + +ILM+SLM:: +* Health Report API should not return RED for unassigned cold/frozen shards when data is available {es-pull}100776[#100776] +* Switch fleet's built-in ILM policies to use .actions.rollover.max_primary_shard_size {es-pull}99984[#99984] (issue: {es-issue}99983[#99983]) + +Indices APIs:: +* Add executed pipelines to bulk api response {es-pull}100031[#100031] +* Add support for marking component templates as deprecated {es-pull}101148[#101148] (issue: {es-issue}100992[#100992]) +* Allowing non-dynamic index settings to be updated by automatically unassigning shards {es-pull}101723[#101723] +* Rename component templates and pipelines according to the new naming conventions {es-pull}99975[#99975] +* Run `TransportGetAliasesAction` on local node {es-pull}101815[#101815] + +Infra/CLI:: +* Set `ActiveProcessorCount` when `node.processors` is set 
{es-pull}101846[#101846] + +Infra/Core:: +* Add apm api for asynchronous counters (always increasing) {es-pull}102598[#102598] +* Log errors in `RestResponse` regardless of `error_trace` parameter {es-pull}101066[#101066] (issue: {es-issue}100884[#100884]) + +Infra/Logging:: +* Add status code to `rest.suppressed` log output {es-pull}100990[#100990] + +Ingest Node:: +* Deprecate the unused `elasticsearch_version` field of enrich policy json {es-pull}103013[#103013] +* Optimize `MurmurHash3` {es-pull}101202[#101202] + +Machine Learning:: +* Accept a single or multiple inputs to `_inference` {es-pull}102075[#102075] +* Add basic telemetry for the inference feature {es-pull}102877[#102877] +* Add internal inference action for ml models and services {es-pull}102731[#102731] +* Add prefix strings option to trained models {es-pull}102089[#102089] +* Estimate the memory required to deploy trained models more accurately {es-pull}98874[#98874] +* Improve stability of spike and dip detection for the change point aggregation {es-pull}102637[#102637] +* Include ML processor limits in `_ml/info` response {es-pull}101392[#101392] +* Read scores from downloaded vocabulary for XLM Roberta tokenizers {es-pull}101868[#101868] +* Support for GET all models and by task type in the `_inference` API {es-pull}102806[#102806] + +Mapping:: +* Improve analyzer reload log message {es-pull}102273[#102273] + +Monitoring:: +* Add memory utilization Kibana metric to the monitoring index templates {es-pull}102810[#102810] +* Added `beat.stats.libbeat.pipeline.queue.max_events` {es-pull}102570[#102570] + +Network:: +* Record more detailed HTTP stats {es-pull}99852[#99852] + +Search:: +* Add metrics to the shared blob cache {es-pull}101577[#101577] +* Add support for Serbian Language Analyzer {es-pull}100921[#100921] +* Add support for `index_filter` to open pit {es-pull}102388[#102388] (issue: {es-issue}99740[#99740]) +* Added metric for cache eviction of entries with non zero frequency 
{es-pull}100570[#100570] +* Disable inter-segment concurrency when sorting by field {es-pull}101535[#101535] +* Enable query phase parallelism within a single shard {es-pull}101230[#101230] (issue: {es-issue}80693[#80693]) +* Node stats as metrics {es-pull}102248[#102248] +* Optimize `_count` type API requests {es-pull}102888[#102888] + +Security:: +* Expose the `invalidation` field in Get/Query `ApiKey` APIs {es-pull}102472[#102472] +* Make `api_key.delete.interval` a dynamic setting {es-pull}102680[#102680] + +Snapshot/Restore:: +* Fail S3 repository analysis on partial reads {es-pull}102840[#102840] +* Parallelize stale index deletion {es-pull}100316[#100316] (issue: {es-issue}61513[#61513]) +* Repo analysis of uncontended register behaviour {es-pull}101185[#101185] +* Repo analysis: allow configuration of register ops {es-pull}102051[#102051] +* Repo analysis: verify empty register {es-pull}102048[#102048] + +Stats:: +* Introduce includeShardsStats in the stats request to indicate that we only fetch a summary {es-pull}100466[#100466] (issue: {es-issue}99744[#99744]) +* Set includeShardsStats = false in NodesStatsRequest where the caller does not use shards-level statistics {es-pull}100938[#100938] + +Store:: +* Add methods for adding generation listeners with primary term {es-pull}100899[#100899] +* Allow executing multiple periodic flushes while they are being made durable {es-pull}102571[#102571] +* Pass shard's primary term to Engine#addSegmentGenerationListener {es-pull}99752[#99752] + +Transform:: +* Implement exponential backoff for transform state persistence retrying {es-pull}102512[#102512] (issue: {es-issue}102528[#102528]) +* Make tasks that calculate checkpoints time out {es-pull}101055[#101055] +* Pass source query to `_field_caps` (as `index_filter`) when deducing destination index mappings for better performance {es-pull}102379[#102379] +* Pass transform source query as `index_filter` to `open_point_in_time` request {es-pull}102447[#102447] 
(issue: {es-issue}101049[#101049]) +* Skip shards that don't match the source query during checkpointing {es-pull}102138[#102138] + +Vector Search:: +* Add vector_operation_count in profile output for knn searches {es-pull}102032[#102032] +* Make cosine similarity faster by storing magnitude and normalizing vectors {es-pull}99445[#99445] + +[[feature-8.12.0]] +[float] +=== New features + +Application:: +* Enable Connectors API as technical preview {es-pull}102994[#102994] +* [Behavioral Analytics] Analytics collections use Data Stream Lifecycle (DSL) instead of Index Lifecycle Management (ILM) for data retention management. Behavioral analytics has traditionally used ILM to manage data retention. Starting with 8.12.0, this will change. Analytics collections created prior to 8.12.0 will continue to use their existing ILM policies, but new analytics collections will be managed using DSL. {es-pull}100033[#100033] + +Authentication:: +* Patterns support for allowed subjects by the JWT realm {es-pull}102426[#102426] + +Cluster Coordination:: +* Add a node feature join barrier. This prevents nodes from joining clusters that do not have all the features already present in the cluster. This ensures that once a feature is supported by all the nodes in a cluster, that feature will never then not be supported in the future. 
This is the corresponding functionality for the version join barrier, but for features + {es-pull}101609[#101609] + +Data streams:: +* Add ability to create a data stream failure store {es-pull}99134[#99134] + +ES|QL:: +* ESQL: emit warnings from single-value functions processing multi-values {es-pull}102417[#102417] (issue: {es-issue}98743[#98743]) +* GEO_POINT and CARTESIAN_POINT type support {es-pull}102177[#102177] + +Infra/Core:: +* Create new cluster state API for querying features present on a cluster {es-pull}100974[#100974] + +Ingest Node:: +* Adding a simulate ingest api {es-pull}101409[#101409] + +Security:: +* Allow granting API keys with JWT as the access_token {es-pull}101904[#101904] + +Vector Search:: +* Add byte quantization for float vectors in HNSW {es-pull}102093[#102093] +* Make knn search a query {es-pull}98916[#98916] + +[[regression-8.12.0]] +[float] +=== Regressions + +Infra/Core:: +* Revert non-semantic `NodeInfo` {es-pull}102636[#102636] + +[[upgrade-8.12.0]] +[float] +=== Upgrades + +Search:: +* Upgrade to Lucene 9.9.1 {es-pull}103549[#103549] + diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 7757e7c2f7926..6395c8800bb39 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -25,7 +25,6 @@ not be included yet. * <> * <> * <> -* <> * <> * <> * <> @@ -54,6 +53,7 @@ not be included yet. 
* <> * <> * <> +* <> * <> * <> * <> @@ -75,7 +75,6 @@ include::{es-repo-dir}/eql/eql-apis.asciidoc[] include::{es-repo-dir}/esql/esql-apis.asciidoc[] include::{es-repo-dir}/features/apis/features-apis.asciidoc[] include::{es-repo-dir}/fleet/index.asciidoc[] -include::{es-repo-dir}/text-structure/apis/find-structure.asciidoc[leveloffset=+1] include::{es-repo-dir}/graph/explore.asciidoc[] include::{es-repo-dir}/indices.asciidoc[] include::{es-repo-dir}/ilm/apis/ilm-api.asciidoc[] @@ -103,6 +102,7 @@ include::{es-repo-dir}/snapshot-restore/apis/snapshot-restore-apis.asciidoc[] include::{es-repo-dir}/slm/apis/slm-api.asciidoc[] include::{es-repo-dir}/sql/apis/sql-apis.asciidoc[] include::{es-repo-dir}/synonyms/apis/synonyms-apis.asciidoc[] +include::{es-repo-dir}/text-structure/apis/index.asciidoc[] include::{es-repo-dir}/transform/apis/index.asciidoc[] include::usage.asciidoc[] include::{es-repo-dir}/rest-api/watcher.asciidoc[] diff --git a/docs/reference/rest-api/security.asciidoc b/docs/reference/rest-api/security.asciidoc index d88622db7006a..94b632490ad86 100644 --- a/docs/reference/rest-api/security.asciidoc +++ b/docs/reference/rest-api/security.asciidoc @@ -95,6 +95,7 @@ native realm: * <> * <> * <> +* <> [discrete] [[security-service-account-apis]] @@ -187,6 +188,7 @@ include::security/get-role-mappings.asciidoc[] include::security/get-roles.asciidoc[] include::security/get-service-accounts.asciidoc[] include::security/get-service-credentials.asciidoc[] +include::security/get-settings.asciidoc[] include::security/get-tokens.asciidoc[] include::security/get-user-privileges.asciidoc[] @@ -199,7 +201,9 @@ include::security/oidc-prepare-authentication-api.asciidoc[] include::security/oidc-authenticate-api.asciidoc[] include::security/oidc-logout-api.asciidoc[] include::security/query-api-key.asciidoc[] +include::security/query-user.asciidoc[] include::security/update-api-key.asciidoc[] +include::security/update-settings.asciidoc[] 
include::security/bulk-update-api-keys.asciidoc[] include::security/saml-prepare-authentication-api.asciidoc[] include::security/saml-authenticate-api.asciidoc[] diff --git a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc index 8f75293e2c1a4..bd2d21317212b 100644 --- a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc +++ b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc @@ -108,12 +108,14 @@ A successful call returns an object with "cluster" and "index" fields. "none", "post_behavioral_analytics_event", "read_ccr", + "read_connector_secrets", "read_fleet_secrets", "read_ilm", "read_pipeline", "read_security", "read_slm", "transport_client", + "write_connector_secrets", "write_fleet_secrets" ], "index" : [ diff --git a/docs/reference/rest-api/security/get-settings.asciidoc b/docs/reference/rest-api/security/get-settings.asciidoc index d402c74b5c46b..5c38b96903cbd 100644 --- a/docs/reference/rest-api/security/get-settings.asciidoc +++ b/docs/reference/rest-api/security/get-settings.asciidoc @@ -5,17 +5,21 @@ Get Security settings ++++ +[[security-api-get-settings-prereqs]] ==== {api-prereq-title} * To use this API, you must have at least the `read_security` cluster privilege. +[[security-api-get-settings-desc]] ==== {api-description-title} -This API allows a user to retrieve the user-configurable settings for the Security internal index (`.security` and associated indices). Only a subset of the index settings — those that are user-configurable—will be shown. This includes: +This API allows a user to retrieve the user-configurable settings for the +Security internal index (`.security` and associated indices). Only a subset of +the index settings — those that are user-configurable—will be shown. 
This includes: - `index.auto_expand_replicas` - `index.number_of_replicas` -An example of retrieving the Security settings: +An example of retrieving the security settings: [source,console] ----------------------------------------------------------- @@ -24,4 +28,5 @@ GET /_security/settings // TEST[setup:user_profiles] // TEST[setup:service_token42] -The configurable settings can be modified using the <> API. +The configurable settings can be modified using the +<> API. diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc index 0e5973a010a47..a08a8fd1858b6 100644 --- a/docs/reference/rest-api/security/query-api-key.asciidoc +++ b/docs/reference/rest-api/security/query-api-key.asciidoc @@ -54,7 +54,7 @@ The query supports a subset of query types, including <>, <>, <>, <>, <>, <>, <>, <>, -and <>. +<>, and <> + You can query the following public values associated with an API key. + @@ -64,6 +64,11 @@ You can query the following public values associated with an API key. `id`:: ID of the API key. Note `id` must be queried with the <> query. +`type`:: +API keys can be of type `rest`, if created via the <> or +the <> APIs, or of type `cross_cluster` if created via +the <> API. + `name`:: Name of the API key. @@ -87,9 +92,13 @@ Username of the API key owner. Realm name of the API key owner. `metadata`:: -Metadata field associated with the API key, such as `metadata.my_field`. Because -metadata is stored as a <> field type, all fields act like -`keyword` fields when querying and sorting. +Metadata field associated with the API key, such as `metadata.my_field`. +Metadata is internally indexed as a <> field type. +This means that all fields act like `keyword` fields when querying and sorting. +It's not possible to refer to a subset of metadata fields using wildcard +patterns, e.g. `metadata.field*`, even for query types that support field +name patterns. 
Lastly, all the metadata fields can be searched together when +simply mentioning `metadata` (not followed by any dot and sub-field name). NOTE: You cannot query the role descriptors of an API key. ==== diff --git a/docs/reference/rest-api/security/query-user.asciidoc b/docs/reference/rest-api/security/query-user.asciidoc new file mode 100644 index 0000000000000..08ead0f389ee9 --- /dev/null +++ b/docs/reference/rest-api/security/query-user.asciidoc @@ -0,0 +1,310 @@ +[role="xpack"] +[[security-api-query-user]] +=== Query User API + +++++ +Query User +++++ + +Retrieves <> with <> in a <> fashion. + +NOTE: As opposed to the <>, <> are excluded from the +result. This API is only for <>. + +[[security-api-query-user-request]] +==== {api-request-title} + +`GET /_security/_query/user` + +`POST /_security/_query/user` + +[[security-api-query-user-prereqs]] +==== {api-prereq-title} + +* To use this API, you must have at least the `read_security` cluster privilege. + +[[security-api-query-user-desc]] +==== {api-description-title} + +Use this API to retrieve users managed by the +<> in a paginated manner. +You can optionally filter the results with a query. + +[[security-api-query-user-request-body]] +==== {api-request-body-title} + +You can specify the following parameters in the request body: + +`query`:: +(Optional, string) A <> to filter which users to return. +The query supports a subset of query types, including +<>, <>, +<>, <>, +<>, <> and <>. ++ +You can query the following public values associated with a user. ++ +.Valid values for `query` +[%collapsible%open] +==== +`username`:: +An identifier for the user. + +`roles`:: +An array of the role names of the <> that are assigned to the user. + +`full_name`:: +Full name of the user. + +`email`:: +The email of the user. + +`enabled`:: +Specifies whether the user is enabled. 
+ +==== + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from] ++ +By default, you cannot page through more than 10,000 hits using the `from` and +`size` parameters. To page through more hits, use the +<> parameter. + +`size`:: +(Optional, integer) The number of hits to return. Must not be negative and defaults to `10`. ++ +By default, you cannot page through more than 10,000 hits using the `from` and +`size` parameters. To page through more hits, use the +<> parameter. + +`sort`:: +(Optional, object) <>. You can sort on `username`, `roles` or `enabled`. +In addition, sort can also be applied to the `_doc` field to sort by index order. + +`search_after`:: +(Optional, array) <> definition. + + +[[security-api-query-user-response-body]] +==== {api-response-body-title} + +This API returns the following top level fields: + +`total`:: +The total number of users found. + +`count`:: +The number of users returned in the response. + +`users`:: +A list of users that match the query. + +[[security-api-query-user-example]] +==== {api-examples-title} + +The following request lists all users, assuming you have the +`read_security` privilege: + +[source,console] +---- +GET /_security/_query/user +---- +// TEST[setup:jacknich_user,sandrakn_user] + +A successful call returns a JSON structure that contains the information +retrieved from one or more users: + +[source,console-result] +---- +{ + "total": 2, + "count": 2, + "users": [ <1> + { + "username": "jacknich", + "roles": [ + "admin", + "other_role1" + ], + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "metadata": { + "intelligence": 7 + }, + "enabled": true + }, + { + "username": "sandrakn", + "roles": [ + "admin", + "other_role1" + ], + "full_name": "Sandra Knight", + "email": "sandrakn@example.com", + "metadata": { + "intelligence": 7 + }, + "enabled": true + } + ] +} +---- +// NOTCONSOLE + +<1> The list of users that were retrieved for this request + +If you create a user with the following 
details: + +[source,console] +---- +POST /_security/user/jacknich +{ + "password" : "l0ng-r4nd0m-p@ssw0rd", + "roles" : [ "admin", "other_role1" ], + "full_name" : "Jack Nicholson", + "email" : "jacknich@example.com", + "metadata" : { + "intelligence" : 7 + } +} +---- + +A successful call returns a JSON structure: + +[source,console-result] +---- +{ + "created": true +} +---- + +Use the user information to retrieve the user with a query: + +[source,console] +---- +GET /_security/_query/user +{ + "query": { + "prefix": { + "roles": "other" + } + } +} +---- +// TEST[setup:jacknich_user] +A successful call returns a JSON structure for a user: + +[source,console-result] +-------------------------------------------------- +{ + "total": 1, + "count": 1, + "users": [ + { + "username": "jacknich", + "roles": [ + "admin", + "other_role1" + ], + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "metadata": { + "intelligence": 7 + }, + "enabled": true + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +Use a `bool` query to issue complex logical conditions and use +`from`, `size`, `sort` to help paginate the result: + +[source,js] +---- +GET /_security/_query/user +{ + "query": { + "bool": { + "must": [ + { + "wildcard": { + "email": "*example.com" <1> + } + }, + { + "term": { + "enabled": true <2> + } + } + ], + "filter": [ + { + "wildcard": { + "roles": "*other*" <3> + } + } + ] + } + }, + "from": 1, <4> + "size": 2, <5> + "sort": [ + { "username": { "order": "desc"} } <6> + ] +} +---- +// NOTCONSOLE + +<1> The email must end with `example.com` +<2> The user must be enabled +<3> The result will be filtered to only contain users with at least one role that contains the substring `other` +<4> The offset to begin the search result is the 2nd (zero-based index) user +<5> The page size of the response is 2 users +<6> The result is sorted by `username` in descending order + +The response contains a list of matched users along with their 
sort values: + +[source,js] +---- +{ + "total": 5, + "count": 2, + "users": [ + { + "username": "ray", + "roles": [ + "other_role3" + ], + "full_name": "Ray Nicholson", + "email": "rayn@example.com", + "metadata": { + "intelligence": 7 + }, + "enabled": true, + "_sort": [ + "ray" <1> + ] + }, + { + "username": "lorraine", + "roles": [ + "other_role3" + ], + "full_name": "Lorraine Nicholson", + "email": "lorraine@example.com", + "metadata": { + "intelligence": 7 + }, + "enabled": true, + "_sort": [ + "lorraine" + ] + } + ] +} +---- +// NOTCONSOLE + +<1> The sort value is `username` diff --git a/docs/reference/rest-api/security/update-settings.asciidoc b/docs/reference/rest-api/security/update-settings.asciidoc index 525b297123c31..0ea41d86e85ed 100644 --- a/docs/reference/rest-api/security/update-settings.asciidoc +++ b/docs/reference/rest-api/security/update-settings.asciidoc @@ -5,12 +5,16 @@ Update Security settings ++++ +[[security-api-update-settings-prereqs]] ==== {api-prereq-title} * To use this API, you must have at least the `manage_security` cluster privilege. +[[security-api-update-settings-desc]] ==== {api-description-title} -This API allows a user to modify the settings for the Security internal indices (`.security` and associated indices). Only a subset of settings are allowed to be modified. This includes: +This API allows a user to modify the settings for the Security internal indices +(`.security` and associated indices). Only a subset of settings are allowed to +be modified. This includes: - `index.auto_expand_replicas` - `index.number_of_replicas` @@ -34,17 +38,23 @@ PUT /_security/settings ----------------------------------------------------------- // TEST[skip:making sure all the indices have been created reliably is difficult] -The configured settings can be retrieved using the <> API. 
If a -given index is not in use on the system, but settings are provided for it, the request will be rejected - this API does -not yet support configuring the settings for these indices before they are in use. +The configured settings can be retrieved using the +<> API. If a given index +is not in use on the system, but settings are provided for it, the request will +be rejected - this API does not yet support configuring the settings for these +indices before they are in use. + ==== {api-request-body-title} + `security`:: -(Optional, object) Settings to be used for the index used for most security configuration, including Native realm users -and roles configured via the API. +(Optional, object) Settings to be used for the index used for most security +configuration, including Native realm users and roles configured via the API. `security-tokens`:: -(Optional, object) Settings to be used for the index used to store <>. +(Optional, object) Settings to be used for the index used to store +<>. `security`:: -(Optional, object) Settings to be used for the index used to store <> information. +(Optional, object) Settings to be used for the index used to store +<> information. 
diff --git a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc index 8c289c27a2d31..bf51042d6adec 100644 --- a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc +++ b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc @@ -1,21 +1,22 @@ -[cols="^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^"] +[cols="^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^"] |==== -| 15+^h| Remote cluster version +| 16+^h| Remote cluster version h| Local cluster version - | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 |8.6 |8.7 |8.8 |8.9 |8.10 |8.11 -| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}|{yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | 
{yes-icon} | {yes-icon} | {yes-icon} -| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} + | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 |8.6 |8.7 |8.8 |8.9 |8.10 |8.11 |8.12 +| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | 
{no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}|{yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | 
{no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon} +| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} |==== \ No newline at end of file diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index a847d9a306b7c..a68cacec8c10c 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -821,9 +821,6 @@ Now we have filtered based on the top level `"creation_time"` and only one docum Additionally, if you wanted to extract the nearest passage for a matched document, you can supply <> to the `knn` clause. -NOTE: `inner_hits` for kNN will only ever return a single hit, the nearest passage vector. -Setting `"size"` to any value greater than `1` will have no effect on the results. - NOTE: When using `inner_hits` and multiple `knn` clauses, be sure to specify the <> field. Otherwise, a naming clash can occur and fail the search request. 
@@ -848,7 +845,8 @@ POST passage_vectors/_search "_source": false, "fields": [ "paragraph.text" - ] + ], + "size": 1 } } } @@ -891,7 +889,7 @@ Now the result will contain the nearest found paragraph when searching. "paragraph": { "hits": { "total": { - "value": 1, + "value": 2, "relation": "eq" }, "max_score": 1.0, @@ -935,7 +933,7 @@ Now the result will contain the nearest found paragraph when searching. "paragraph": { "hits": { "total": { - "value": 1, + "value": 2, "relation": "eq" }, "max_score": 0.9997144, diff --git a/docs/reference/search/search-your-data/search-across-clusters.asciidoc b/docs/reference/search/search-your-data/search-across-clusters.asciidoc index e1b96afd82196..93955659a1b2a 100644 --- a/docs/reference/search/search-your-data/search-across-clusters.asciidoc +++ b/docs/reference/search/search-your-data/search-across-clusters.asciidoc @@ -18,6 +18,7 @@ The following APIs support {ccs}: * <> * <> * {painless}/painless-execute-api.html[Painless execute API] +* <> * experimental:[] <> * experimental:[] <> * experimental:[] <> @@ -478,16 +479,26 @@ The API returns the following response: <1> The async search id. <2> When `ccs_minimize_roundtrips` = `true` and searches on the remote clusters are still running, this section indicates the number of shards in scope for the -local cluster only. This will be updated to include the total number of shards -across all clusters only when the search is completed. When -`ccs_minimize_roundtrips`= `false`, the total shard count is known up front and -will be correct. +local cluster only and any clusters that have finished their search so far. +This will be updated to include the total number of shards across all clusters only +when the search is completed. When `ccs_minimize_roundtrips`= `false`, the total shard +count across all clusters is known up front and will be correct. <3> The `_clusters` section indicates that 3 clusters are in scope for the search and all are currently in the "running" state. 
If you query the <> endpoint while the query is still running, you will see an update in the `_clusters` and `_shards` section of -the response when the local search has finished. +the response as each cluster finishes its search. + +If you set `ccs_minimize_roundtrips=false`, then you will also see partial aggregation +results from shards (from any cluster) that have finished, but no results are shown in +"hits" section until the search has completed. + +If you set `ccs_minimize_roundtrips=true`, then you will also see partial results +in the "hits" and "aggregations" section of the response from all clusters that have +completed so far. (Note: you can also see partial aggregation results from the local cluster +even before it finishes.) The example below shows the `ccs_minimize_roundtrips=true` case. + [source,console] -------------------------------------------------- @@ -510,16 +521,16 @@ Response: "timed_out": false, "terminated_early": false, "_shards": { - "total": 10, - "successful": 10, <1> + "total": 22, + "successful": 22, <1> "skipped": 0, "failed": 0 }, "_clusters": { "total": 3, - "successful": 1, <2> + "successful": 2, <2> "skipped": 0, - "running": 2, + "running": 1, "partial": 0, "failed": 0, "details": { @@ -536,9 +547,16 @@ Response: } }, "cluster_one": { - "status": "running", + "status": "successful", "indices": "my-index-000001", - "timed_out": false + "took": 9039, + "timed_out": false, + "_shards": { + "total": 12, + "successful": 12, + "skipped": 0, + "failed": 0 + } }, "cluster_two": { "status": "running", @@ -549,11 +567,11 @@ Response: }, "hits": { "total": { - "value": 167, <3> + "value": 542, <3> "relation": "eq" }, - "max_score": null, - "hits": [] + "max_score": 1.7232, + "hits": [...list of hits here...] <4> } } } @@ -561,12 +579,13 @@ Response: // TEST[skip: hard to reproduce intermediate results] -<1> All the local cluster shards have completed. 
-<2> The local cluster search has completed, so the "successful" clusters entry -is set to 1 and "running" clusters entry reduced to 2. The `_clusters` response metadata will be updated as each cluster -finishes. -<3> Number of hits from the local cluster search. Final hits are not -shown until searches on all clusters have been completed and merged. +<1> Searches on all shards of the local cluster and remote `cluster_one` cluster have completed. +<2> Since two clusters have completed the search, the "successful" clusters entry +is set to 2 and "running" clusters entry is reduced to 1. The `_clusters` response metadata +will be updated as each cluster finishes. +<3> Number of hits from the completed searches so far. Final hits are not shown +until searches on all clusters have been completed and merged. Thus, the "hits" +section can change as you call this endpoint until the search is completely done. After searches on all the clusters have completed, querying the @@ -613,7 +632,7 @@ Response: "(local)": { "status": "successful", "indices": "my-index-000001", - "took": 14382, + "took": 2034, "timed_out": false, "_shards": { "total": 10, @@ -625,7 +644,7 @@ Response: "cluster_one": { "status": "successful", "indices": "my-index-000001", - "took": 22193, + "took": 9039, "timed_out": false, "_shards": { "total": 12, @@ -666,8 +685,8 @@ Response: // TESTRESPONSE[s/1685996911108/$body.expiration_time_in_millis/] // TESTRESPONSE[s/1685564938727/$body.completion_time_in_millis/] // TESTRESPONSE[s/"took": 27619/"took": "$body.response.took"/] -// TESTRESPONSE[s/"took": 14382/"took": "$body.$_path"/] -// TESTRESPONSE[s/"took": 22193/"took": "$body.$_path"/] +// TESTRESPONSE[s/"took": 2034/"took": "$body.$_path"/] +// TESTRESPONSE[s/"took": 9039/"took": "$body.$_path"/] // TESTRESPONSE[s/"took": 27550/"took": "$body.$_path"/] // TESTRESPONSE[s/"total": 28/"total": $body.response._shards.total/] // TESTRESPONSE[s/"successful": 28/"successful": 
$body.response._shards.successful/] @@ -1033,8 +1052,9 @@ Key differences are: of shards is gathered from all clusters before the search starts. . The `_shards` section will be incrementally updated as searches on individual -shards complete, so you will get a more accurate accounting of progress during a -long-running search compared to when minimize roundtrips is used. +shards complete, whereas when minimizing roundtrips, the shards section will be +updated as searches on shards complete on the local cluster and then as each +remote cluster reports back its full search results. . The `_cluster` section starts off listing all of its shard counts, since they are also obtained before the query phase begins. @@ -1224,13 +1244,23 @@ network roundtrips, and sets the parameter `ccs_minimize_roundtrips` to `false`. [[ccs-min-roundtrips]] ==== Considerations for choosing whether to minimize roundtrips in a {ccs} -For cross-cluster searches that query a large number of shards, the minimize roundtrips +Advantages of minimizing roundtrips: + +. For cross-cluster searches that query a large number of shards, the minimize roundtrips option typically provides much better performance. This is especially true if the clusters being searched have high network latency (e.g., distant geographic regions). -However, not minimizing roundtrips allows you to get back incremental results of -any aggregations in your query when using async-search while the search is still -running. +. When doing an async {ccs}, the `GET _async_search/` endpoint will provide both +top hits and aggregations from all clusters that have reported back results even while the search +is still running on other clusters. In other words, it provides "incremental" partial results as +the search progresses. Note that if the local cluster is included in the search, it has special +handling in that it can show partial aggregations (but not partial top hits) while the search +on the local cluster is still running. 
+ + +Not minimizing roundtrips when using async-search allows you to get back incremental results of +any aggregations in your query as individual shards complete (rather than whole clusters) while +the search is still running, but top hits are not shown until the search has completed on all clusters. By default, synchronous searches minimize roundtrips, while asynchronous searches do not. You can override the default by using the `ccs_minimize_roundtrips` parameter, diff --git a/docs/reference/security/auditing/event-types.asciidoc b/docs/reference/security/auditing/event-types.asciidoc index 9539ea38b5a6b..a856336dba8d0 100644 --- a/docs/reference/security/auditing/event-types.asciidoc +++ b/docs/reference/security/auditing/event-types.asciidoc @@ -255,7 +255,7 @@ event action. "applications":[],"run_as":[]},{"cluster":["all"],"indices":[{"names": ["index-b*"],"privileges":["all"]}],"applications":[],"run_as":[]}], "metadata":{"application":"my-application","environment":{"level": 1, -"tags":["dev","staging"]}}}}} +"tags":["dev","staging"]}},"expiration":"10d"}}} ==== [[event-change-apikeys]] @@ -281,7 +281,7 @@ event action. "applications":[],"run_as":[]},{"cluster":["all"],"indices":[{"names": ["index-b*"],"privileges":["all"]}],"applications":[],"run_as":[]}], "metadata":{"application":"my-application","environment":{"level":1, -"tags":["dev","staging"]}}}}} +"tags":["dev","staging"]}},"expiration":"10d"}}} ==== [[event-delete-privileges]] @@ -797,7 +797,7 @@ The `role_descriptors` objects have the same schema as the `role_descriptor` object that is part of the above `role` config object. The object for an API key update will differ in that it will not include -a `name` or `expiration`. +a `name`. 
`grant` :: An object like: + diff --git a/docs/reference/security/securing-communications/security-minimal-setup.asciidoc b/docs/reference/security/securing-communications/security-minimal-setup.asciidoc index 6fc561c7296fb..ee158294df03c 100644 --- a/docs/reference/security/securing-communications/security-minimal-setup.asciidoc +++ b/docs/reference/security/securing-communications/security-minimal-setup.asciidoc @@ -111,7 +111,7 @@ you created earlier. {kib} performs some background tasks that require use of th This account is not meant for individual users and does not have permission to log in to {kib} from a browser. Instead, you'll log in to {kib} as the `elastic` superuser. -. Add the `elasticsearch.username` setting to the `KIB_PATH_CONF/kibana.yml` +. Add the `elasticsearch.username` setting to the `KBN_PATH_CONF/kibana.yml` file and set the value to the `kibana_system` user: + [source,yaml] @@ -119,7 +119,7 @@ file and set the value to the `kibana_system` user: elasticsearch.username: "kibana_system" ---- + -NOTE: The `KIB_PATH_CONF` variable is the path for the {kib} +NOTE: The `KBN_PATH_CONF` variable is the path for the {kib} configuration files. If you installed {kib} using archive distributions (`zip` or `tar.gz`), the variable defaults to `KIB_HOME/config`. If you used package distributions (Debian or RPM), the variable defaults to `/etc/kibana`. diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index e204061c28458..b01f7322f9834 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -296,18 +296,10 @@ include::repository-shared-settings.asciidoc[] `storage_class`:: - Sets the S3 storage class for objects stored in the snapshot repository. - Values may be `standard`, `reduced_redundancy`, `standard_ia`, `onezone_ia` - and `intelligent_tiering`. Defaults to `standard`. 
Changing this setting on - an existing repository only affects the storage class for newly created - objects, resulting in a mixed usage of storage classes. You may use an S3 - Lifecycle Policy to adjust the storage class of existing objects in your - repository, but you must not transition objects to Glacier classes and you - must not expire objects. If you use Glacier storage classes or object - expiry then you may permanently lose access to your repository contents. - For more information about S3 storage classes, see - https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html[AWS - Storage Classes Guide] + Sets the S3 storage class for objects written to the repository. Values may + be `standard`, `reduced_redundancy`, `standard_ia`, `onezone_ia` and + `intelligent_tiering`. Defaults to `standard`. See + <> for more information. NOTE: The option of defining client settings in the repository settings as documented below is considered deprecated, and will be removed in a future @@ -338,6 +330,37 @@ PUT _snapshot/my_s3_repository This sets up a repository that uses all client settings from the client `my_client_name` except for the `endpoint` that is overridden to `my.s3.endpoint` by the repository settings. +` +[[repository-s3-storage-classes]] +==== S3 storage classes + +Amazon S3 supports a variety of _storage classes_, each of which offers +different operational characteristics. For instance, some classes cost less per +byte stored per month, but cost more per request, and other classes may vary in +terms of their availability guarantees. + +You may specify the storage class that {es} uses to store data objects with the +`storage_class` repository setting. + +Changing the `storage_class` setting on an existing repository only affects the +storage class for newly created objects, resulting in a mixed usage of storage +classes. 
+ +You may use an S3 Lifecycle Policy to adjust the storage class of existing +objects in your repository, but you must not transition objects to an +unsupported class such as the Glacier classes, and you must not expire objects. +If you use a Glacier storage class, or another unsupported storage class, or +object expiry, then you may permanently lose access to your repository +contents. + +You may use the `intellligent_tiering` storage class to automatically manage +the class of objects, but you must not enable the optional Archive Access or +Deep Archive Access tiers. If you use these tiers then you may permanently lose +access to your repository contents. + +For more information about S3 storage classes, see +https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html[AWS +Storage Classes Guide]. [[repository-s3-permissions]] ==== Recommended S3 permissions diff --git a/docs/reference/text-structure/apis/index.asciidoc b/docs/reference/text-structure/apis/index.asciidoc new file mode 100644 index 0000000000000..8628badba7e78 --- /dev/null +++ b/docs/reference/text-structure/apis/index.asciidoc @@ -0,0 +1,11 @@ +[role="xpack"] +[[text-structure-apis]] +== Text structure APIs + +You can use the following APIs to find text structures: + +* <> +* <> + +include::find-structure.asciidoc[leveloffset=+2] +include::test-grok-pattern.asciidoc[leveloffset=+2] diff --git a/docs/reference/text-structure/apis/test-grok-pattern.asciidoc b/docs/reference/text-structure/apis/test-grok-pattern.asciidoc new file mode 100644 index 0000000000000..4034a24cf0a19 --- /dev/null +++ b/docs/reference/text-structure/apis/test-grok-pattern.asciidoc @@ -0,0 +1,95 @@ +[role="xpack"] +[[test-grok-pattern]] += Test Grok pattern API + +++++ +Test Grok pattern +++++ + +Tests a Grok pattern on lines of text, see also <>. 
+ +[discrete] +[[test-grok-pattern-request]] +== {api-request-title} + +`GET _text_structure/test_grok_pattern` + + +`POST _text_structure/test_grok_pattern` + + +[discrete] +[[test-grok-pattern-desc]] +== {api-description-title} + +The test Grok pattern API allows you to execute a Grok pattern on one +or more lines of text. It returns whether the lines match the pattern +together with the offsets and lengths of the matched substrings. + +[discrete] +[[test-grok-pattern-query-parms]] +== {api-query-parms-title} + +`ecs_compatibility`:: +(Optional, string) The mode of compatibility with ECS compliant Grok patterns. +Use this parameter to specify whether to use ECS Grok patterns instead of +legacy ones when the structure finder creates a Grok pattern. Valid values +are `disabled` and `v1`. The default value is `disabled`. + +[discrete] +[[test-grok-pattern-request-body]] +== {api-request-body-title} + +`grok_pattern`:: +(Required, string) +The Grok pattern to run on the lines of text. + +`text`:: +(Required, array of strings) +The lines of text to run the Grok pattern on. 
+ +[discrete] +[[test-grok-pattern-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +GET _text_structure/test_grok_pattern +{ + "grok_pattern": "Hello %{WORD:first_name} %{WORD:last_name}", + "text": [ + "Hello John Doe", + "this does not match" + ] +} +-------------------------------------------------- + +The API returns the following response: + +[source,console-result] +---- +{ + "matches": [ + { + "matched": true, + "fields": { + "first_name": [ + { + "match": "John", + "offset": 6, + "length": 4 + } + ], + "last_name": [ + { + "match": "Doe", + "offset": 11, + "length": 3 + } + ] + } + }, + { + "matched": false + } + ] +} +---- diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index e5ad75e048c1b..de1f9e6c7a608 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -58,9 +58,14 @@ fix problems that an {es} deployment might encounter. * <> * <> -If none of these solutions relate to your issue, you can still get help: +[discrete] +[[troubleshooting-contact-support]] +=== Contact us + +If none of these guides relate to your issue, or you need further assistance, +then you can contact us as follows: -* For users with an active subscription, you can get help in several ways: +* If you have an active subscription, you have several options: ** Go directly to the http://support.elastic.co[Support Portal] diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index be69b2341d6e2..6960ab39b48cf 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -1,5 +1,5 @@ [versions] -asm = "9.4" +asm = "9.6" jackson = "2.15.0" junit5 = "5.8.1" spock = "2.1-groovy-3.0" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 24b81106dcea3..295cb08847f83 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1574,19 +1574,9 @@ - - - - - - 
- - - - - - - + + + @@ -1689,46 +1679,25 @@ - - - + + + - - + + - - - + + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - + + + @@ -2664,244 +2633,124 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + @@ -3807,29 +3656,14 @@ - - - - - - - - - - - - - - - - - - + + + - - - + + + diff --git a/libs/core/build.gradle b/libs/core/build.gradle index ac36d98375237..1e20f4ae22949 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -7,6 +7,7 @@ */ apply plugin: 'elasticsearch.publish' +apply plugin: 'elasticsearch.mrjar' dependencies { // This dependency is used only by :libs:core for null-checking interop with other tools diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java index 5b9a6f4698967..9acaefcd91976 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java @@ -23,6 +23,7 @@ import java.util.Locale; import java.util.Map; import java.util.function.Consumer; +import java.util.function.Function; public final class Grok { @@ -86,7 +87,7 @@ private Grok( expressionBytes.length, Option.DEFAULT, UTF8Encoding.INSTANCE, - message -> logCallBack.accept(message) + logCallBack::accept ); List grokCaptureConfigs = new ArrayList<>(); @@ -116,7 +117,7 @@ private static String groupMatch(String name, Region region, String pattern) { * * @return named regex expression */ - protected String toRegex(PatternBank patternBank, String 
grokPattern) { + String toRegex(PatternBank patternBank, String grokPattern) { StringBuilder res = new StringBuilder(); for (int i = 0; i < MAX_TO_REGEX_ITERATIONS; i++) { byte[] grokPatternBytes = grokPattern.getBytes(StandardCharsets.UTF_8); @@ -189,8 +190,25 @@ public boolean match(String text) { * @return a map containing field names and their respective coerced values that matched or null if the pattern didn't match */ public Map captures(String text) { + return innerCaptures(text, cfg -> cfg::objectExtracter); + } + + /** + * Matches and returns the ranges of any named captures. + * + * @param text the text to match and extract values from. + * @return a map containing field names and their respective ranges that matched or null if the pattern didn't match + */ + public Map captureRanges(String text) { + return innerCaptures(text, cfg -> cfg::rangeExtracter); + } + + private Map innerCaptures( + String text, + Function, GrokCaptureExtracter>> getExtracter + ) { byte[] utf8Bytes = text.getBytes(StandardCharsets.UTF_8); - GrokCaptureExtracter.MapExtracter extracter = new GrokCaptureExtracter.MapExtracter(captureConfig); + GrokCaptureExtracter.MapExtracter extracter = new GrokCaptureExtracter.MapExtracter(captureConfig, getExtracter); if (match(utf8Bytes, 0, utf8Bytes.length, extracter)) { return extracter.result(); } diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java index 703db401814d0..3b10cffebebbf 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java @@ -144,4 +144,21 @@ public interface NativeExtracterMap { */ T forBoolean(Function, GrokCaptureExtracter> buildExtracter); } + + /** + * Creates a {@linkplain GrokCaptureExtracter} that will call {@code emit} with the + * extracted range (offset and length) when it extracts text. 
+ */ + public GrokCaptureExtracter rangeExtracter(Consumer emit) { + return (utf8Bytes, offset, region) -> { + for (int number : backRefs) { + if (region.beg[number] >= 0) { + int matchOffset = offset + region.beg[number]; + int matchLength = region.end[number] - region.beg[number]; + String match = new String(utf8Bytes, matchOffset, matchLength, StandardCharsets.UTF_8); + emit.accept(new GrokCaptureExtracter.Range(match, matchOffset, matchLength)); + } + } + }; + } } diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureExtracter.java b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureExtracter.java index fa7762d7d6cf6..415348be05a7c 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureExtracter.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureExtracter.java @@ -11,9 +11,11 @@ import org.joni.Region; import java.util.ArrayList; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Function; import static java.util.Collections.emptyMap; @@ -22,6 +24,8 @@ */ public interface GrokCaptureExtracter { + record Range(Object match, int offset, int length) {} + /** * Extract {@link Map} results. This implementation of {@link GrokCaptureExtracter} * is mutable and should be discarded after collecting a single result. @@ -31,11 +35,14 @@ class MapExtracter implements GrokCaptureExtracter { private final List fieldExtracters; @SuppressWarnings("unchecked") - MapExtracter(List captureConfig) { - result = captureConfig.isEmpty() ? emptyMap() : new HashMap<>(); + MapExtracter( + List captureConfig, + Function, GrokCaptureExtracter>> getExtracter + ) { + result = captureConfig.isEmpty() ? 
emptyMap() : new LinkedHashMap<>(); fieldExtracters = new ArrayList<>(captureConfig.size()); for (GrokCaptureConfig config : captureConfig) { - fieldExtracters.add(config.objectExtracter(value -> { + fieldExtracters.add(getExtracter.apply(config).apply(value -> { var key = config.name(); // Logstash's Grok processor flattens the list of values to a single value in case there's only 1 match, diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java index 6c5618f11ec93..41a4a86c4cb8b 100644 --- a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java +++ b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java @@ -82,13 +82,61 @@ private void testCapturesBytes(boolean ecsCompatibility) { } private Map captureBytes(Grok grok, byte[] utf8, int offset, int length) { - GrokCaptureExtracter.MapExtracter extracter = new GrokCaptureExtracter.MapExtracter(grok.captureConfig()); + GrokCaptureExtracter.MapExtracter extracter = new GrokCaptureExtracter.MapExtracter( + grok.captureConfig(), + cfg -> cfg::objectExtracter + ); if (grok.match(utf8, offset, length, extracter)) { return extracter.result(); } return null; } + public void testCaptureRanges() { + captureRanges(false); + captureRanges(true); + } + + private void captureRanges(boolean ecsCompatibility) { + Grok grok = new Grok(GrokBuiltinPatterns.get(ecsCompatibility), "%{WORD:a} %{WORD:b} %{NUMBER:c:int}", logger::warn); + assertThat( + grok.captureRanges("xx aaaaa bbb 1234 yyy"), + equalTo( + Map.of( + "a", + new GrokCaptureExtracter.Range("aaaaa", 3, 5), + "b", + new GrokCaptureExtracter.Range("bbb", 9, 3), + "c", + new GrokCaptureExtracter.Range("1234", 13, 4) + ) + ) + ); + } + + public void testCaptureRanges_noMatch() { + captureRanges_noMatch(false); + captureRanges_noMatch(true); + } + + private void captureRanges_noMatch(boolean ecsCompatibility) { + Grok grok = new Grok(GrokBuiltinPatterns.get(ecsCompatibility), 
"%{WORD:a} %{WORD:b} %{NUMBER:c:int}", logger::warn); + assertNull(grok.captureRanges("xx aaaaa bbb ccc yyy")); + } + + public void testCaptureRanges_multipleNamedCapturesWithSameName() { + captureRanges_multipleNamedCapturesWithSameName(false); + captureRanges_multipleNamedCapturesWithSameName(true); + } + + private void captureRanges_multipleNamedCapturesWithSameName(boolean ecsCompatibility) { + Grok grok = new Grok(GrokBuiltinPatterns.get(ecsCompatibility), "%{WORD:parts} %{WORD:parts}", logger::warn); + assertThat( + grok.captureRanges(" aa bbb c ddd e "), + equalTo(Map.of("parts", List.of(new GrokCaptureExtracter.Range("aa", 2, 2), new GrokCaptureExtracter.Range("bbb", 5, 3)))) + ); + } + public void testNoMatchingPatternInDictionary() { Exception e = expectThrows(IllegalArgumentException.class, () -> new Grok(PatternBank.EMPTY, "%{NOTFOUND}", logger::warn)); assertThat(e.getMessage(), equalTo("Unable to find pattern [NOTFOUND] in Grok's pattern dictionary")); diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java index 2050ce20b1aee..917d8f0b80f2c 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java @@ -22,8 +22,8 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import 
org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.global.Global; @@ -177,7 +177,7 @@ public void setupSuiteScopeCluster() throws Exception { public void testStandAloneTimeSeriesAgg() { assertNoFailuresAndResponse(prepareSearch("index").setSize(0).addAggregation(timeSeries("by_ts")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalTimeSeries timeSeries = aggregations.get("by_ts"); assertThat( @@ -203,7 +203,7 @@ public void testTimeSeriesGroupedByADimension() { .subAggregation(timeSeries("by_ts")) ), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); Terms terms = aggregations.get("by_dim"); Set> keys = new HashSet<>(); @@ -236,7 +236,7 @@ public void testTimeSeriesGroupedByDateHistogram() { .subAggregation(timeSeries("by_ts").subAggregation(stats("timestamp").field("@timestamp"))) ), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); Histogram histogram = aggregations.get("by_time"); Map, Long> keys = new HashMap<>(); @@ -275,7 +275,7 @@ public void testStandAloneTimeSeriesAggWithDimFilter() { assertNoFailuresAndResponse( prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalTimeSeries timeSeries = aggregations.get("by_ts"); Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); @@ -308,7 +308,7 @@ public void testStandAloneTimeSeriesAggWithGlobalAggregation() { 
.addAggregation(global("everything").subAggregation(sum("all_sum").field("metric_" + metric))) .addAggregation(PipelineAggregatorBuilders.sumBucket("total_filter_sum", "by_ts>filter_sum")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalTimeSeries timeSeries = aggregations.get("by_ts"); Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); @@ -353,7 +353,7 @@ public void testStandAloneTimeSeriesAggWithMetricFilter() { assertNoFailuresAndResponse( prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalTimeSeries timeSeries = aggregations.get("by_ts"); Map, Map>> filteredData = dataFilteredByMetric( diff --git a/modules/aggregations/src/main/java/module-info.java b/modules/aggregations/src/main/java/module-info.java index b9f2cb834736f..c9abc97eae8dc 100644 --- a/modules/aggregations/src/main/java/module-info.java +++ b/modules/aggregations/src/main/java/module-info.java @@ -22,8 +22,4 @@ opens org.elasticsearch.aggregations to org.elasticsearch.painless.spi; // whitelist resource access provides org.elasticsearch.painless.spi.PainlessExtension with org.elasticsearch.aggregations.AggregationsPainlessExtension; - - provides org.elasticsearch.plugins.spi.NamedXContentProvider - with - org.elasticsearch.aggregations.metric.MatrixStatsNamedXContentProvider; } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 44a79fd6dc104..07a363ed727c7 100644 --- 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -20,7 +20,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; @@ -195,7 +194,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I } assert builtBucketIndex == totalBucketsToBuild; builtBucketIndex = 0; - InternalAggregations[] bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); + var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; for (int owningBucketOrdIdx = 0; owningBucketOrdIdx < owningBucketOrds.length; owningBucketOrdIdx++) { List buckets = new ArrayList<>(filters.length); @@ -209,7 +208,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( keys[i], docCount, - bucketSubAggs[builtBucketIndex++] + bucketSubAggs.apply(builtBucketIndex++) ); buckets.add(bucket); } @@ -225,7 +224,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( intersectKey, docCount, - bucketSubAggs[builtBucketIndex++] + bucketSubAggs.apply(builtBucketIndex++) ); buckets.add(bucket); } diff --git 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/ParsedAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/ParsedAdjacencyMatrix.java deleted file mode 100644 index 1c558db86e8eb..0000000000000 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/ParsedAdjacencyMatrix.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.aggregations.bucket.adjacency; - -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -public class ParsedAdjacencyMatrix extends ParsedMultiBucketAggregation implements AdjacencyMatrix { - - private Map bucketMap; - - @Override - public String getType() { - return AdjacencyMatrixAggregationBuilder.NAME; - } - - @Override - public List getBuckets() { - return buckets; - } - - @Override - public ParsedBucket getBucketByKey(String key) { - if (bucketMap == null) { - bucketMap = Maps.newMapWithExpectedSize(buckets.size()); - for (ParsedBucket bucket : buckets) { - bucketMap.put(bucket.getKey(), bucket); - } - } - return bucketMap.get(key); - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedAdjacencyMatrix.class.getSimpleName(), - true, - ParsedAdjacencyMatrix::new - ); - static { - declareMultiBucketAggregationFields(PARSER, ParsedBucket::fromXContent, ParsedBucket::fromXContent); - } - - 
public static ParsedAdjacencyMatrix fromXContent(XContentParser parser, String name) throws IOException { - ParsedAdjacencyMatrix aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; - } - - public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements AdjacencyMatrix.Bucket { - - private String key; - - @Override - public String getKey() { - return key; - } - - @Override - public String getKeyAsString() { - return key; - } - - static ParsedBucket fromXContent(XContentParser parser) throws IOException { - return parseXContent(parser, false, ParsedBucket::new, (p, bucket) -> bucket.key = p.text()); - } - } -} diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index c058fb5743369..de36a9721fe38 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; @@ -109,7 +108,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return aggregations; } @@ -416,7 +415,7 @@ protected Bucket reduceBucket(List buckets, AggregationReduceContext con long docCount = 0; for (Bucket bucket : buckets) { 
docCount += bucket.docCount; - aggregations.add((InternalAggregations) bucket.getAggregations()); + aggregations.add(bucket.getAggregations()); } InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return new InternalAutoDateHistogram.Bucket(buckets.get(0).key, docCount, format, aggs); diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/ParsedAutoDateHistogram.java deleted file mode 100644 index 1ba29602ed45b..0000000000000 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/ParsedAutoDateHistogram.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.aggregations.bucket.histogram; - -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.time.Instant; -import java.time.ZoneOffset; -import java.util.List; - -public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation implements Histogram { - - @Override - public String getType() { - return AutoDateHistogramAggregationBuilder.NAME; - } - - private String interval; - - public String getInterval() { - return interval; - } - - public void setInterval(String interval) { - this.interval = interval; - } - - @Override - public List getBuckets() { - return buckets; - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedAutoDateHistogram.class.getSimpleName(), - true, - ParsedAutoDateHistogram::new - ); - static { - declareMultiBucketAggregationFields( - PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true) - ); - PARSER.declareString((parsed, value) -> parsed.interval = value, new ParseField("interval")); - } - - public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException { - ParsedAutoDateHistogram aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder = super.doXContentBody(builder, params); - builder.field("interval", getInterval()); - return builder; - } - - public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket { - - private Long key; - - 
@Override - public Object getKey() { - if (key != null) { - return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); - } - return null; - } - - @Override - public String getKeyAsString() { - String keyAsString = super.getKeyAsString(); - if (keyAsString != null) { - return keyAsString; - } - if (key != null) { - return Long.toString(key); - } - return null; - } - - @Override - protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { - return builder.field(CommonFields.KEY.getPreferredName(), key); - } - - static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException { - return parseXContent(parser, keyed, ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue()); - } - } -} diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/ParsedTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/ParsedTimeSeries.java deleted file mode 100644 index f4eff3e878b59..0000000000000 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/ParsedTimeSeries.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.aggregations.bucket.timeseries; - -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -public class ParsedTimeSeries extends ParsedMultiBucketAggregation { - - private transient Map bucketMap; - - @Override - public String getType() { - return TimeSeriesAggregationBuilder.NAME; - } - - @Override - public List getBuckets() { - return buckets; - } - - public ParsedBucket getBucketByKey(String key) { - if (bucketMap == null) { - bucketMap = new HashMap<>(buckets.size()); - for (ParsedTimeSeries.ParsedBucket bucket : buckets) { - bucketMap.put(bucket.getKeyAsString(), bucket); - } - } - return bucketMap.get(key); - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedTimeSeries.class.getSimpleName(), - true, - ParsedTimeSeries::new - ); - static { - declareMultiBucketAggregationFields( - PARSER, - parser -> ParsedTimeSeries.ParsedBucket.fromXContent(parser, false), - parser -> ParsedTimeSeries.ParsedBucket.fromXContent(parser, true) - ); - } - - public static ParsedTimeSeries fromXContent(XContentParser parser, String name) throws IOException { - ParsedTimeSeries aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; - } - - static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket { - - private Map key; - - @Override - public Object getKey() { - return key; - } - - @Override - public String getKeyAsString() { - return key.toString(); - } - - static ParsedTimeSeries.ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException { - return parseXContent(parser, keyed, ParsedTimeSeries.ParsedBucket::new, (p, bucket) -> bucket.key = new TreeMap<>(p.map())); - } - } - -} diff --git 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsNamedXContentProvider.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsNamedXContentProvider.java deleted file mode 100644 index 66670a3f69022..0000000000000 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsNamedXContentProvider.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.aggregations.metric; - -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.List; - -import static java.util.Collections.singletonList; - -public class MatrixStatsNamedXContentProvider implements NamedXContentProvider { - - @Override - public List getNamedXContentParsers() { - ParseField parseField = new ParseField(MatrixStatsAggregationBuilder.NAME); - ContextParser contextParser = (p, name) -> ParsedMatrixStats.fromXContent(p, (String) name); - return singletonList(new NamedXContentRegistry.Entry(Aggregation.class, parseField, contextParser)); - } -} diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/ParsedMatrixStats.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/ParsedMatrixStats.java deleted file mode 100644 index 2866a08e8608e..0000000000000 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/ParsedMatrixStats.java +++ 
/dev/null @@ -1,223 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.aggregations.metric; - -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Objects; - -public class ParsedMatrixStats extends ParsedAggregation { - - private final Map counts = new LinkedHashMap<>(); - private final Map means = new HashMap<>(); - private final Map variances = new HashMap<>(); - private final Map skewness = new HashMap<>(); - private final Map kurtosis = new HashMap<>(); - private final Map> covariances = new HashMap<>(); - private final Map> correlations = new HashMap<>(); - - private long docCount; - - @Override - public String getType() { - return MatrixStatsAggregationBuilder.NAME; - } - - private void setDocCount(long docCount) { - this.docCount = docCount; - } - - public long getDocCount() { - return docCount; - } - - public long getFieldCount(String field) { - if (counts.containsKey(field) == false) { - return 0; - } - return counts.get(field); - } - - public double getMean(String field) { - return checkedGet(means, field); - } - - public double getVariance(String field) { - return checkedGet(variances, field); - } - - public double getSkewness(String field) { - return checkedGet(skewness, field); - } - - public double getKurtosis(String field) { - 
return checkedGet(kurtosis, field); - } - - public double getCovariance(String fieldX, String fieldY) { - if (fieldX.equals(fieldY)) { - return checkedGet(variances, fieldX); - } - return MatrixStatsResults.getValFromUpperTriangularMatrix(covariances, fieldX, fieldY); - } - - public double getCorrelation(String fieldX, String fieldY) { - if (fieldX.equals(fieldY)) { - return 1.0; - } - return MatrixStatsResults.getValFromUpperTriangularMatrix(correlations, fieldX, fieldY); - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); - if (counts.isEmpty() == false) { - builder.startArray(InternalMatrixStats.Fields.FIELDS); - for (String fieldName : counts.keySet()) { - builder.startObject(); - builder.field(InternalMatrixStats.Fields.NAME, fieldName); - builder.field(InternalMatrixStats.Fields.COUNT, getFieldCount(fieldName)); - builder.field(InternalMatrixStats.Fields.MEAN, getMean(fieldName)); - builder.field(InternalMatrixStats.Fields.VARIANCE, getVariance(fieldName)); - builder.field(InternalMatrixStats.Fields.SKEWNESS, getSkewness(fieldName)); - builder.field(InternalMatrixStats.Fields.KURTOSIS, getKurtosis(fieldName)); - { - builder.startObject(InternalMatrixStats.Fields.COVARIANCE); - Map covars = covariances.get(fieldName); - if (covars != null) { - for (Map.Entry covar : covars.entrySet()) { - builder.field(covar.getKey(), covar.getValue()); - } - } - builder.endObject(); - } - { - builder.startObject(InternalMatrixStats.Fields.CORRELATION); - Map correls = correlations.get(fieldName); - if (correls != null) { - for (Map.Entry correl : correls.entrySet()) { - builder.field(correl.getKey(), correl.getValue()); - } - } - builder.endObject(); - } - builder.endObject(); - } - builder.endArray(); - } - return builder; - } - - private static T checkedGet(final Map values, final String fieldName) { - if (fieldName == null) { - 
throw new IllegalArgumentException("field name cannot be null"); - } - if (values.containsKey(fieldName) == false) { - throw new IllegalArgumentException("field " + fieldName + " does not exist"); - } - return values.get(fieldName); - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedMatrixStats.class.getSimpleName(), - true, - ParsedMatrixStats::new - ); - static { - declareAggregationFields(PARSER); - PARSER.declareLong(ParsedMatrixStats::setDocCount, CommonFields.DOC_COUNT); - PARSER.declareObjectArray((matrixStats, results) -> { - for (ParsedMatrixStatsResult result : results) { - final String fieldName = result.name; - matrixStats.counts.put(fieldName, result.count); - matrixStats.means.put(fieldName, result.mean); - matrixStats.variances.put(fieldName, result.variance); - matrixStats.skewness.put(fieldName, result.skewness); - matrixStats.kurtosis.put(fieldName, result.kurtosis); - matrixStats.covariances.put(fieldName, result.covariances); - matrixStats.correlations.put(fieldName, result.correlations); - } - }, (p, c) -> ParsedMatrixStatsResult.fromXContent(p), new ParseField(InternalMatrixStats.Fields.FIELDS)); - } - - public static ParsedMatrixStats fromXContent(XContentParser parser, String name) throws IOException { - ParsedMatrixStats aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; - } - - static class ParsedMatrixStatsResult { - - String name; - Long count; - Double mean; - Double variance; - Double skewness; - Double kurtosis; - Map covariances; - Map correlations; - - private static final ObjectParser RESULT_PARSER = new ObjectParser<>( - ParsedMatrixStatsResult.class.getSimpleName(), - true, - ParsedMatrixStatsResult::new - ); - static { - RESULT_PARSER.declareString((result, name) -> result.name = name, new ParseField(InternalMatrixStats.Fields.NAME)); - RESULT_PARSER.declareLong((result, count) -> result.count = count, new ParseField(InternalMatrixStats.Fields.COUNT)); - 
RESULT_PARSER.declareDouble((result, mean) -> result.mean = mean, new ParseField(InternalMatrixStats.Fields.MEAN)); - RESULT_PARSER.declareDouble( - (result, variance) -> result.variance = variance, - new ParseField(InternalMatrixStats.Fields.VARIANCE) - ); - RESULT_PARSER.declareDouble( - (result, skewness) -> result.skewness = skewness, - new ParseField(InternalMatrixStats.Fields.SKEWNESS) - ); - RESULT_PARSER.declareDouble( - (result, kurtosis) -> result.kurtosis = kurtosis, - new ParseField(InternalMatrixStats.Fields.KURTOSIS) - ); - - RESULT_PARSER.declareObject((ParsedMatrixStatsResult result, Map covars) -> { - result.covariances = Maps.newLinkedHashMapWithExpectedSize(covars.size()); - for (Map.Entry covar : covars.entrySet()) { - result.covariances.put(covar.getKey(), mapValueAsDouble(covar.getValue())); - } - }, (p, c) -> p.mapOrdered(), new ParseField(InternalMatrixStats.Fields.COVARIANCE)); - - RESULT_PARSER.declareObject((ParsedMatrixStatsResult result, Map correls) -> { - result.correlations = Maps.newLinkedHashMapWithExpectedSize(correls.size()); - for (Map.Entry correl : correls.entrySet()) { - result.correlations.put(correl.getKey(), mapValueAsDouble(correl.getValue())); - } - }, (p, c) -> p.mapOrdered(), new ParseField(InternalMatrixStats.Fields.CORRELATION)); - } - - private static Double mapValueAsDouble(Object value) { - if (value instanceof Double) { - return (Double) value; - } - return Double.valueOf(Objects.toString(value)); - } - - static ParsedMatrixStatsResult fromXContent(XContentParser parser) throws IOException { - return RESULT_PARSER.parse(parser, null); - } - } -} diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java index 89d445903f8cc..91aba020b8856 100644 --- 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java @@ -69,7 +69,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe xDiff = (thisBucketKey.doubleValue() - lastBucketKey.doubleValue()) / xAxisUnits; } final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toCollection(ArrayList::new)); aggs.add(new Derivative(name(), gradient, xDiff, formatter, metadata())); Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); diff --git a/modules/aggregations/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/modules/aggregations/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider deleted file mode 100644 index c44951e05c942..0000000000000 --- a/modules/aggregations/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider +++ /dev/null @@ -1,9 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0 and the Server Side Public License, v 1; you may not use this file except -# in compliance with, at your election, the Elastic License 2.0 or the Server -# Side Public License, v 1. 
-# - -org.elasticsearch.aggregations.metric.MatrixStatsNamedXContentProvider diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/AggregationMultiBucketAggregationTestCase.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/AggregationMultiBucketAggregationTestCase.java index 76558b242c86d..a5fbfeaa4d537 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/AggregationMultiBucketAggregationTestCase.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/AggregationMultiBucketAggregationTestCase.java @@ -9,18 +9,10 @@ package org.elasticsearch.aggregations.bucket; import org.elasticsearch.aggregations.AggregationsPlugin; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.List; -import java.util.Map; /** * Base class for unit testing multi bucket aggregation's bucket implementations that reside in aggregations module. 
@@ -35,15 +27,4 @@ protected SearchPlugin registerPlugin() { return new AggregationsPlugin(); } - @Override - protected List getNamedXContents() { - var entry = getParser(); - return CollectionUtils.appendToCopy( - getDefaultNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()) - ); - } - - protected abstract Map.Entry> getParser(); - } diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java index 22877c5bbc32b..e2f544845a60a 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java @@ -10,9 +10,7 @@ import org.elasticsearch.aggregations.bucket.AggregationMultiBucketAggregationTestCase; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.xcontent.ContextParser; import java.util.ArrayList; import java.util.HashMap; @@ -22,11 +20,6 @@ public class InternalAdjacencyMatrixTests extends AggregationMultiBucketAggregationTestCase { - @Override - protected Map.Entry> getParser() { - return Map.entry(AdjacencyMatrixAggregationBuilder.NAME, (p, c) -> ParsedAdjacencyMatrix.fromXContent(p, (String) c)); - } - private List keys; @Override @@ -94,11 +87,6 @@ protected void assertReduced(InternalAdjacencyMatrix reduced, List implementationClass() { - return ParsedAdjacencyMatrix.class; - } - @Override protected InternalAdjacencyMatrix mutateInstance(InternalAdjacencyMatrix instance) { String name = instance.getName(); diff --git 
a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 6b59aa6e84657..ad7b1f3c1efbe 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -21,14 +21,12 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.test.TransportVersionUtils; -import org.elasticsearch.xcontent.ContextParser; import java.io.IOException; import java.time.Instant; @@ -55,11 +53,6 @@ public class InternalAutoDateHistogramTests extends AggregationMultiBucketAggregationTestCase { - @Override - protected Map.Entry> getParser() { - return Map.entry(AutoDateHistogramAggregationBuilder.NAME, (p, c) -> ParsedAutoDateHistogram.fromXContent(p, (String) c)); - } - protected InternalAutoDateHistogram createTestInstance( String name, Map metadata, @@ -274,11 +267,6 @@ private int getBucketCount(long min, long max, Rounding.Prepared prepared, int i return bucketCount; } - @Override - protected Class implementationClass() { - return ParsedAutoDateHistogram.class; - } - @Override protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) { String name = 
instance.getName(); diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java index 1ed6fa058e643..cc3813e7ec53a 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java @@ -16,11 +16,9 @@ import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.xcontent.ContextParser; import java.io.IOException; import java.io.UncheckedIOException; @@ -30,18 +28,12 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import java.util.function.Predicate; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class InternalTimeSeriesTests extends AggregationMultiBucketAggregationTestCase { - @Override - protected Map.Entry> getParser() { - return Map.entry(TimeSeriesAggregationBuilder.NAME, (p, c) -> ParsedTimeSeries.fromXContent(p, (String) c)); - } - private List randomBuckets(boolean keyed, InternalAggregations aggregations) { int numberOfBuckets = randomNumberOfBuckets(); List bucketList = new ArrayList<>(numberOfBuckets); @@ -108,16 +100,6 @@ protected void assertReduced(InternalTimeSeries reduced, List implementationClass() { - return ParsedTimeSeries.class; - } - - @Override - protected Predicate 
excludePathsFromXContentInsertion() { - return s -> s.endsWith(".key"); - } - public void testReduceSimple() { // a simple test, to easily spot easy mistakes in the merge logic in InternalTimeSeries#reduce(...) method. InternalTimeSeries first = new InternalTimeSeries( diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/InternalMatrixStatsTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/InternalMatrixStatsTests.java index 9ca315b5a7b76..66dcd6dc1cb92 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/InternalMatrixStatsTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/InternalMatrixStatsTests.java @@ -8,34 +8,25 @@ package org.elasticsearch.aggregations.metric; import org.elasticsearch.aggregations.AggregationsPlugin; -import org.elasticsearch.aggregations.metric.InternalMatrixStats.Fields; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; 
-import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import static org.mockito.Mockito.mock; @@ -60,15 +51,6 @@ public void setUp() throws Exception { } } - @Override - protected List getNamedXContents() { - ContextParser parser = (p, c) -> ParsedMatrixStats.fromXContent(p, (String) c); - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(MatrixStatsAggregationBuilder.NAME), parser) - ); - } - @Override protected InternalMatrixStats createTestInstance(String name, Map metadata) { double[] values = new double[fields.length]; @@ -174,48 +156,4 @@ public void testReduceRandom() { protected void assertReduced(InternalMatrixStats reduced, List inputs) { throw new UnsupportedOperationException(); } - - @Override - protected void assertFromXContent(InternalMatrixStats expected, ParsedAggregation parsedAggregation) throws IOException { - assertTrue(parsedAggregation instanceof ParsedMatrixStats); - ParsedMatrixStats actual = (ParsedMatrixStats) parsedAggregation; - - assertEquals(expected.getDocCount(), actual.getDocCount()); - - for (String field : fields) { - assertEquals(expected.getFieldCount(field), actual.getFieldCount(field)); - assertEquals(expected.getMean(field), actual.getMean(field), 0.0); - assertEquals(expected.getVariance(field), actual.getVariance(field), 0.0); - assertEquals(expected.getSkewness(field), actual.getSkewness(field), 0.0); - assertEquals(expected.getKurtosis(field), actual.getKurtosis(field), 0.0); - - for (String other : fields) { - assertEquals(expected.getCovariance(field, other), actual.getCovariance(field, other), 0.0); - assertEquals(expected.getCorrelation(field, other), actual.getCorrelation(field, other), 0.0); - } - } - - String unknownField = randomAlphaOfLength(3); - String other = 
randomValueOtherThan(unknownField, () -> randomAlphaOfLength(3)); - // getFieldCount returns 0 for unknown fields - assertEquals(0.0, actual.getFieldCount(unknownField), 0.0); - - expectThrows(IllegalArgumentException.class, () -> actual.getMean(unknownField)); - expectThrows(IllegalArgumentException.class, () -> actual.getVariance(unknownField)); - expectThrows(IllegalArgumentException.class, () -> actual.getSkewness(unknownField)); - expectThrows(IllegalArgumentException.class, () -> actual.getKurtosis(unknownField)); - - expectThrows(IllegalArgumentException.class, () -> actual.getCovariance(unknownField, unknownField)); - expectThrows(IllegalArgumentException.class, () -> actual.getCovariance(unknownField, other)); - expectThrows(IllegalArgumentException.class, () -> actual.getCovariance(other, unknownField)); - - assertEquals(1.0, actual.getCorrelation(unknownField, unknownField), 0.0); - expectThrows(IllegalArgumentException.class, () -> actual.getCorrelation(unknownField, other)); - expectThrows(IllegalArgumentException.class, () -> actual.getCorrelation(other, unknownField)); - } - - @Override - protected Predicate excludePathsFromXContentInsertion() { - return path -> path.endsWith(Fields.CORRELATION) || path.endsWith(Fields.COVARIANCE); - } } diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeResultTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeResultTests.java index 8180e6482bd0d..e0a41ca7bddd9 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeResultTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeResultTests.java @@ -12,17 +12,11 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; -import 
org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.pipeline.ParsedDerivative; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Stream; public class DerivativeResultTests extends InternalAggregationTestCase { @Override @@ -30,20 +24,6 @@ protected SearchPlugin registerPlugin() { return new AggregationsPlugin(); } - @Override - protected List getNamedXContents() { - return Stream.concat( - super.getNamedXContents().stream(), - Stream.of( - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(DerivativePipelineAggregationBuilder.NAME), - (p, c) -> ParsedDerivative.fromXContent(p, (String) c) - ) - ) - ).toList(); - } - @Override protected Derivative createTestInstance(String name, Map metadata) { DocValueFormat formatter = randomNumericDocValueFormat(); @@ -64,19 +44,6 @@ protected void assertReduced(Derivative reduced, List inputs) { // no test since reduce operation is unsupported } - @Override - protected void assertFromXContent(Derivative derivative, ParsedAggregation parsedAggregation) { - ParsedDerivative parsed = ((ParsedDerivative) parsedAggregation); - if (Double.isInfinite(derivative.getValue()) == false && Double.isNaN(derivative.getValue()) == false) { - assertEquals(derivative.getValue(), parsed.value(), Double.MIN_VALUE); - assertEquals(derivative.getValueAsString(), parsed.getValueAsString()); - } else { - // we write Double.NEGATIVE_INFINITY, Double.POSITIVE amd Double.NAN to xContent as 'null', so we - // cannot differentiate between them. 
Also we cannot recreate the exact String representation - assertEquals(parsed.value(), Double.NaN, Double.MIN_VALUE); - } - } - @Override protected Derivative mutateInstance(Derivative instance) { String name = instance.getName(); diff --git a/modules/apm/METERING.md b/modules/apm/METERING.md index 0f5fcc977295d..49b365e135e2b 100644 --- a/modules/apm/METERING.md +++ b/modules/apm/METERING.md @@ -11,7 +11,7 @@ We use elastic's apm-java-agent as an implementation of the API we expose. the implementation can be found in `:modules:apm` The apm-java-agent is responsible for buffering metrics and upon metrics_interval send them over to apm server. -Metrics_interval is configured via a `tracing.apm.agent.metrics_interval` setting +Metrics_interval is configured via a `telemetry.agent.metrics_interval` setting The agent also collects a number of JVM metrics. see https://www.elastic.co/guide/en/apm/agent/java/current/metrics.html#metrics-jvm @@ -108,7 +108,7 @@ rootProject { setting 'xpack.security.audit.enabled', 'true' keystore 'tracing.apm.secret_token', 'TODO-REPLACE' setting 'telemetry.metrics.enabled', 'true' - setting 'tracing.apm.agent.server_url', 'https://TODO-REPLACE-URL.apm.eastus2.staging.azure.foundit.no:443' + setting 'telemetry.agent.server_url', 'https://TODO-REPLACE-URL.apm.eastus2.staging.azure.foundit.no:443' } } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index 2c33b4f2dc992..9f268a522fa64 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -34,7 +34,7 @@ * make this approach difficult to the point of impossibility. *

* All settings are found under the tracing.apm. prefix. Any setting under - * the tracing.apm.agent. prefix will be forwarded on to the APM Java agent + * the telemetry.agent. prefix will be forwarded on to the APM Java agent * by setting appropriate system properties. Some settings can only be set once, and must be * set when the agent starts. We therefore also create and configure a config file in * the {@code APMJvmOptions} class, which we then delete when Elasticsearch starts, so that @@ -64,16 +64,17 @@ public TelemetryProvider getTelemetryProvider(Settings settings) { @Override public Collection createComponents(PluginServices services) { final APMTracer apmTracer = telemetryProvider.get().getTracer(); + final APMMeterService apmMeter = telemetryProvider.get().getMeterService(); apmTracer.setClusterName(services.clusterService().getClusterName().value()); apmTracer.setNodeName(services.clusterService().getNodeName()); final APMAgentSettings apmAgentSettings = new APMAgentSettings(); apmAgentSettings.syncAgentSystemProperties(settings); - final APMMeterService apmMeter = new APMMeterService(settings); - apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get(), apmMeter); + + apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get()); logger.info("Sending apm metrics is {}", APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); - logger.info("Sending apm tracing is {}", APMAgentSettings.APM_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); + logger.info("Sending apm tracing is {}", APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.get(settings) ? 
"enabled" : "disabled"); return List.of(apmTracer, apmMeter); } @@ -81,14 +82,24 @@ public Collection createComponents(PluginServices services) { @Override public List> getSettings() { return List.of( - APMAgentSettings.APM_ENABLED_SETTING, - APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING, - APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, - APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING, - APMAgentSettings.APM_TRACING_SANITIZE_FIELD_NAMES, + // APM general APMAgentSettings.APM_AGENT_SETTINGS, - APMAgentSettings.APM_SECRET_TOKEN_SETTING, - APMAgentSettings.APM_API_KEY_SETTING + APMAgentSettings.TELEMETRY_SECRET_TOKEN_SETTING, + APMAgentSettings.TELEMETRY_API_KEY_SETTING, + // Metrics + APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING, + // Tracing + APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING, + APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING, + APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING, + APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES, + // The settings below are deprecated and are currently kept as fallback. 
+ APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING, + APMAgentSettings.TRACING_APM_API_KEY_SETTING, + APMAgentSettings.TRACING_APM_ENABLED_SETTING, + APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING, + APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING, + APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES ); } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 41816318a3586..c929d1c484be1 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -22,8 +22,9 @@ import java.security.AccessController; import java.security.PrivilegedAction; import java.util.List; -import java.util.Map; import java.util.Objects; +import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.common.settings.Setting.Property.NodeScope; import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic; @@ -36,26 +37,12 @@ public class APMAgentSettings { private static final Logger LOGGER = LogManager.getLogger(APMAgentSettings.class); - /** - * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent - * config file, as then their values could not be overridden dynamically via system properties. 
- */ - static Map APM_AGENT_DEFAULT_SETTINGS = Map.of( - "transaction_sample_rate", - "0.2", - "enable_experimental_instrumentations", - "true" - ); - - public void addClusterSettingsListeners( - ClusterService clusterService, - APMTelemetryProvider apmTelemetryProvider, - APMMeterService apmMeterService - ) { + public void addClusterSettingsListeners(ClusterService clusterService, APMTelemetryProvider apmTelemetryProvider) { final ClusterSettings clusterSettings = clusterService.getClusterSettings(); final APMTracer apmTracer = apmTelemetryProvider.getTracer(); + final APMMeterService apmMeterService = apmTelemetryProvider.getMeterService(); - clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, enabled -> { + clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_ENABLED_SETTING, enabled -> { apmTracer.setEnabled(enabled); this.setAgentSetting("instrument", Boolean.toString(enabled)); }); @@ -65,9 +52,9 @@ public void addClusterSettingsListeners( // minimise its impact to a running Elasticsearch. 
this.setAgentSetting("recording", Boolean.toString(enabled)); }); - clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, apmTracer::setIncludeNames); - clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_EXCLUDE_SETTING, apmTracer::setExcludeNames); - clusterSettings.addSettingsUpdateConsumer(APM_TRACING_SANITIZE_FIELD_NAMES, apmTracer::setLabelFilters); + clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_NAMES_INCLUDE_SETTING, apmTracer::setIncludeNames); + clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING, apmTracer::setExcludeNames); + clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_SANITIZE_FIELD_NAMES, apmTracer::setLabelFilters); clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(this::setAgentSetting), (x, y) -> {}); } @@ -76,17 +63,8 @@ public void addClusterSettingsListeners( * @param settings the settings to apply */ public void syncAgentSystemProperties(Settings settings) { - this.setAgentSetting("recording", Boolean.toString(APM_ENABLED_SETTING.get(settings))); - - // Apply default values for some system properties. Although we configure - // the settings in APM_AGENT_DEFAULT_SETTINGS to defer to the default values, they won't - // do anything if those settings are never configured. 
- APM_AGENT_DEFAULT_SETTINGS.keySet() - .forEach( - key -> this.setAgentSetting(key, APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings)) - ); - - // Then apply values from the settings in the cluster state + this.setAgentSetting("recording", Boolean.toString(TELEMETRY_TRACING_ENABLED_SETTING.get(settings))); + // Apply values from the settings in the cluster state APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting); } @@ -111,49 +89,193 @@ public void setAgentSetting(String key, String value) { }); } - private static final String APM_SETTING_PREFIX = "tracing.apm."; + private static final String TELEMETRY_SETTING_PREFIX = "telemetry."; + + // The old legacy prefix + private static final String LEGACY_TRACING_APM_SETTING_PREFIX = "tracing.apm."; /** - * A list of APM agent config keys that should never be configured by the user. + * Allow-list of APM agent config keys users are permitted to configure. + * @see APM Java Agent Configuration */ - private static final List PROHIBITED_AGENT_KEYS = List.of( - // ES generates a config file and sets this value - "config_file", - // ES controls this via `telemetry.metrics.enabled` - "recording", - // ES controls this via `apm.enabled` - "instrument" + private static final Set PERMITTED_AGENT_KEYS = Set.of( + // Circuit-Breaker: + "circuit_breaker_enabled", + "stress_monitoring_interval", + "stress_monitor_gc_stress_threshold", + "stress_monitor_gc_relief_threshold", + "stress_monitor_cpu_duration_threshold", + "stress_monitor_system_cpu_stress_threshold", + "stress_monitor_system_cpu_relief_threshold", + + // Core: + // forbid 'enabled', must remain enabled to dynamically enable tracing / metrics + // forbid 'recording' / 'instrument', controlled by 'telemetry.metrics.enabled' / 'tracing.apm.enabled' + "service_name", + "service_node_name", + // forbid 'service_version', forced by APMJvmOptions + "hostname", + "environment", + "transaction_sample_rate", + 
"transaction_max_spans", + "long_field_max_length", + "sanitize_field_names", + "enable_instrumentations", + "disable_instrumentations", + // forbid 'enable_experimental_instrumentations', expected to be always enabled by APMJvmOptions + "unnest_exceptions", + "ignore_exceptions", + "capture_body", + "capture_headers", + "global_labels", + "instrument_ancient_bytecode", + "context_propagation_only", + "classes_excluded_from_instrumentation", + "trace_methods", + "trace_methods_duration_threshold", + // forbid 'central_config', may impact usage of config_file, disabled in APMJvmOptions + // forbid 'config_file', configured by APMJvmOptions + "breakdown_metrics", + "plugins_dir", + "use_elastic_traceparent_header", + "disable_outgoing_tracecontext_headers", + "span_min_duration", + "cloud_provider", + "enable_public_api_annotation_inheritance", + "transaction_name_groups", + "trace_continuation_strategy", + "baggage_to_attach", + + // Datastore: irrelevant, not whitelisted + + // HTTP: + "capture_body_content_types", + "transaction_ignore_urls", + "transaction_ignore_user_agents", + "use_path_as_transaction_name", + // forbid deprecated url_groups + + // Huge Traces: + "span_compression_enabled", + "span_compression_exact_match_max_duration", + "span_compression_same_kind_max_duration", + "exit_span_min_duration", + + // JAX-RS: irrelevant, not whitelisted + + // JMX: + "capture_jmx_metrics", + + // Logging: + "log_level", // allow overriding the default in APMJvmOptions + // forbid log_file, always set by APMJvmOptions + "log_ecs_reformatting", + "log_ecs_reformatting_additional_fields", + "log_ecs_formatter_allow_list", + // forbid log_ecs_reformatting_dir, always use logsDir provided in APMJvmOptions + "log_file_size", + // forbid log_format_sout, always use file logging + // forbid log_format_file, expected to be JSON in APMJvmOptions + "log_sending", + + // Messaging: irrelevant, not whitelisted + + // Metrics: + "dedot_custom_metrics", + 
"custom_metrics_histogram_boundaries", + "metric_set_limit", + "agent_reporter_health_metrics", + "agent_background_overhead_metrics", + + // Profiling: + "profiling_inferred_spans_enabled", + "profiling_inferred_spans_logging_enabled", + "profiling_inferred_spans_sampling_interval", + "profiling_inferred_spans_min_duration", + "profiling_inferred_spans_included_classes", + "profiling_inferred_spans_excluded_classes", + "profiling_inferred_spans_lib_directory", + + // Reporter: + // forbid secret_token: use tracing.apm.secret_token instead + // forbid api_key: use tracing.apm.api_key instead + "server_url", + "server_urls", + "disable_send", + "server_timeout", + "verify_server_cert", + "max_queue_size", + "include_process_args", + "api_request_time", + "api_request_size", + "metrics_interval", + "disable_metrics", + + // Serverless: + "aws_lambda_handler", + "data_flush_timeout", + + // Stacktraces: + "application_packages", + "stack_trace_limit", + "span_stack_trace_min_duration" ); public static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( - APM_SETTING_PREFIX + "agent.", - (qualifiedKey) -> { - final String[] parts = qualifiedKey.split("\\."); - final String key = parts[parts.length - 1]; - final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, ""); - return new Setting<>(qualifiedKey, defaultValue, (value) -> { - if (PROHIBITED_AGENT_KEYS.contains(key)) { - throw new IllegalArgumentException("Explicitly configuring [" + qualifiedKey + "] is prohibited"); + TELEMETRY_SETTING_PREFIX + "agent.", + LEGACY_TRACING_APM_SETTING_PREFIX + "agent.", + (namespace, qualifiedKey) -> new Setting<>(qualifiedKey, "", (value) -> { + if (qualifiedKey.equals("_na_") == false && PERMITTED_AGENT_KEYS.contains(namespace) == false) { + if (namespace.startsWith("global_labels.")) { + // The nested labels syntax is transformed in APMJvmOptions. + // Ignore these here to not fail if not correctly removed. 
+ return value; } - return value; - }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic); - } + throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown."); + } + return value; + }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic) ); - public static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.stringListSetting( - APM_SETTING_PREFIX + "names.include", + /** + * To be deprecated in favor of TELEMETRY_TRACING_NAMES_INCLUDE_SETTING. + */ + public static final Setting> TRACING_APM_NAMES_INCLUDE_SETTING = Setting.stringListSetting( + LEGACY_TRACING_APM_SETTING_PREFIX + "names.include", OperatorDynamic, NodeScope ); - public static final Setting> APM_TRACING_NAMES_EXCLUDE_SETTING = Setting.stringListSetting( - APM_SETTING_PREFIX + "names.exclude", + public static final Setting> TELEMETRY_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( + TELEMETRY_SETTING_PREFIX + "tracing.names.include", + TRACING_APM_NAMES_INCLUDE_SETTING, + Function.identity(), OperatorDynamic, NodeScope ); - public static final Setting> APM_TRACING_SANITIZE_FIELD_NAMES = Setting.stringListSetting( - APM_SETTING_PREFIX + "sanitize_field_names", + /** + * To be deprecated in favor of TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING. + */ + public static final Setting> TRACING_APM_NAMES_EXCLUDE_SETTING = Setting.stringListSetting( + LEGACY_TRACING_APM_SETTING_PREFIX + "names.exclude", + OperatorDynamic, + NodeScope + ); + + public static final Setting> TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING = Setting.listSetting( + TELEMETRY_SETTING_PREFIX + "tracing.names.exclude", + TRACING_APM_NAMES_EXCLUDE_SETTING, + Function.identity(), + OperatorDynamic, + NodeScope + ); + + /** + * To be deprecated in favor of TELEMETRY_TRACING_SANITIZE_FIELD_NAMES. 
+ */ + public static final Setting> TRACING_APM_SANITIZE_FIELD_NAMES = Setting.stringListSetting( + LEGACY_TRACING_APM_SETTING_PREFIX + "sanitize_field_names", List.of( "password", "passwd", @@ -172,24 +294,61 @@ public void setAgentSetting(String key, String value) { NodeScope ); - public static final Setting APM_ENABLED_SETTING = Setting.boolSetting( - APM_SETTING_PREFIX + "enabled", + public static final Setting> TELEMETRY_TRACING_SANITIZE_FIELD_NAMES = Setting.listSetting( + TELEMETRY_SETTING_PREFIX + "tracing.sanitize_field_names", + TRACING_APM_SANITIZE_FIELD_NAMES, + Function.identity(), + OperatorDynamic, + NodeScope + ); + + /** + * To be deprecated in favor of TELEMETRY_TRACING_ENABLED_SETTING. + */ + public static final Setting TRACING_APM_ENABLED_SETTING = Setting.boolSetting( + LEGACY_TRACING_APM_SETTING_PREFIX + "enabled", false, OperatorDynamic, NodeScope ); + public static final Setting TELEMETRY_TRACING_ENABLED_SETTING = Setting.boolSetting( + TELEMETRY_SETTING_PREFIX + "tracing.enabled", + TRACING_APM_ENABLED_SETTING, + OperatorDynamic, + NodeScope + ); + public static final Setting TELEMETRY_METRICS_ENABLED_SETTING = Setting.boolSetting( - "telemetry.metrics.enabled", + TELEMETRY_SETTING_PREFIX + "metrics.enabled", false, OperatorDynamic, NodeScope ); - public static final Setting APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( - APM_SETTING_PREFIX + "secret_token", + /** + * To be deprecated in favor of TELEMETRY_SECRET_TOKEN_SETTING. + */ + public static final Setting TRACING_APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( + LEGACY_TRACING_APM_SETTING_PREFIX + "secret_token", + null + ); + + public static final Setting TELEMETRY_SECRET_TOKEN_SETTING = SecureSetting.secureString( + TELEMETRY_SETTING_PREFIX + "secret_token", + TRACING_APM_SECRET_TOKEN_SETTING + ); + + /** + * To be deprecated in favor of TELEMETRY_API_KEY_SETTING. 
+ */ + public static final Setting TRACING_APM_API_KEY_SETTING = SecureSetting.secureString( + LEGACY_TRACING_APM_SETTING_PREFIX + "api_key", null ); - public static final Setting APM_API_KEY_SETTING = SecureSetting.secureString(APM_SETTING_PREFIX + "api_key", null); + public static final Setting TELEMETRY_API_KEY_SETTING = SecureSetting.secureString( + TELEMETRY_SETTING_PREFIX + "api_key", + TRACING_APM_API_KEY_SETTING + ); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java index 21f0b8491f644..ae1204e75af1a 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java @@ -49,7 +49,7 @@ public APMMeterRegistry getMeterRegistry() { } /** - * @see APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider, APMMeterService) + * @see APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider) */ void setEnabled(boolean enabled) { this.enabled = enabled; diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java index 5b78c2f5f6a3c..d7b061b4b0d19 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java @@ -14,12 +14,10 @@ import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; public class APMTelemetryProvider implements TelemetryProvider { - private final Settings settings; private final APMTracer apmTracer; private final APMMeterService apmMeterService; public APMTelemetryProvider(Settings settings) { - this.settings = settings; apmTracer = new 
APMTracer(settings); apmMeterService = new APMMeterService(settings); } @@ -29,6 +27,10 @@ public APMTracer getTracer() { return apmTracer; } + public APMMeterService getMeterService() { + return apmMeterService; + } + @Override public APMMeterRegistry getMeterRegistry() { return apmMeterService.getMeterRegistry(); diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidator.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidator.java index 1a698b778687c..9ab7412426db8 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidator.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidator.java @@ -32,6 +32,13 @@ public class MetricNameValidator { static final int MAX_ELEMENT_LENGTH = 30; static final int MAX_NUMBER_OF_ELEMENTS = 10; + static final Set SKIP_VALIDATION_METRIC_NAMES_DUE_TO_BWC = Set.of( + "searchable_snapshots_cache_fetch_async", + "searchable_snapshots_cache_prewarming", + "security-token-key", + "security-crypto" + ); + private MetricNameValidator() {} /** @@ -42,6 +49,10 @@ private MetricNameValidator() {} */ public static String validate(String metricName) { Objects.requireNonNull(metricName); + + if (skipValidationToBWC(metricName)) { + return metricName; + } validateMaxMetricNameLength(metricName); String[] elements = metricName.split("\\."); @@ -53,6 +64,19 @@ public static String validate(String metricName) { return metricName; } + /** + * Due to backwards compatibility some metric names would have to skip validation. + * This is for instance where a threadpool name is too long, or contains `-` + * We want to allow to easily find threadpools in code base that are alerting with a metric + * as well as find thread pools metrics in dashboards with their codebase names. + * Renaming a threadpool name would be a breaking change. 
+ * + * NOTE: only allow skipping validation if a refactor in codebase would cause a breaking change + */ + private static boolean skipValidationToBWC(String metricName) { + return SKIP_VALIDATION_METRIC_NAMES_DUE_TO_BWC.stream().anyMatch(m -> metricName.contains(m)); + } + private static void validateMaxMetricNameLength(String metricName) { if (metricName.length() > MAX_METRIC_NAME_LENGTH) { throw new IllegalArgumentException( @@ -108,6 +132,7 @@ private static void hasESPrefix(String[] elements, String name) { private static void perElementValidations(String[] elements, String name) { for (String element : elements) { + hasOnlyAllowedCharacters(element, name); hasNotBreachLengthLimit(element, name); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index f021eb61ca753..a88207eace06f 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -33,6 +33,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.tasks.Task; +import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; import org.elasticsearch.telemetry.tracing.TraceContext; import org.elasticsearch.telemetry.tracing.Traceable; @@ -43,11 +44,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.APM_ENABLED_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.APM_TRACING_SANITIZE_FIELD_NAMES; - /** * This is an implementation of the {@link 
org.elasticsearch.telemetry.tracing.Tracer} interface, which uses * the OpenTelemetry API to capture spans. @@ -89,13 +85,13 @@ public void setNodeName(String nodeName) { record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} public APMTracer(Settings settings) { - this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); - this.excludeNames = APM_TRACING_NAMES_EXCLUDE_SETTING.get(settings); - this.labelFilters = APM_TRACING_SANITIZE_FIELD_NAMES.get(settings); + this.includeNames = APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.get(settings); + this.excludeNames = APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.get(settings); + this.labelFilters = APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(settings); this.filterAutomaton = buildAutomaton(includeNames, excludeNames); this.labelFilterAutomaton = buildAutomaton(labelFilters, List.of()); - this.enabled = APM_ENABLED_SETTING.get(settings); + this.enabled = APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.get(settings); } public void setEnabled(boolean enabled) { diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java index b22a57bb9bf0c..52607a79fe69d 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java @@ -8,11 +8,17 @@ package org.elasticsearch.telemetry.apm.internal; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.util.List; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; import 
static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; public class APMAgentSettingsTests extends ESTestCase { @@ -20,9 +26,17 @@ public class APMAgentSettingsTests extends ESTestCase { /** * Check that when the tracer is enabled, it also sets the APM agent's recording system property to true. */ - public void test_whenTracerEnabled_setsRecordingProperty() { + public void testEnableTracing() { APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); + apmAgentSettings.syncAgentSystemProperties(settings); + + verify(apmAgentSettings).setAgentSetting("recording", "true"); + } + + public void testEnableTracingUsingLegacySetting() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_ENABLED_SETTING.getKey(), true).build(); apmAgentSettings.syncAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "true"); @@ -31,57 +45,117 @@ public void test_whenTracerEnabled_setsRecordingProperty() { /** * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false. 
*/ - public void test_whenTracerDisabled_setsRecordingProperty() { + public void testDisableTracing() { APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), false).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build(); apmAgentSettings.syncAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "false"); } - /** - * Check that when cluster settings are synchronised with the system properties, default values are - * applied. - */ - public void test_whenTracerCreated_defaultSettingsApplied() { + public void testDisableTracingUsingLegacySetting() { APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_ENABLED_SETTING.getKey(), false).build(); apmAgentSettings.syncAgentSystemProperties(settings); - verify(apmAgentSettings).setAgentSetting("transaction_sample_rate", "0.2"); + verify(apmAgentSettings).setAgentSetting("recording", "false"); } /** - * Check that when cluster settings are synchronised with the system properties, values in the settings - * are reflected in the system properties, overwriting default values. + * Check that when cluster settings are synchronised with the system properties, agent settings are set. 
*/ - public void test_whenTracerCreated_clusterSettingsOverrideDefaults() { + public void testSetAgentSettings() { APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); Settings settings = Settings.builder() - .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true) - .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "transaction_sample_rate", "0.75") + .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true") .build(); apmAgentSettings.syncAgentSystemProperties(settings); - // This happens twice because we first apply the default settings, whose values are overridden - // from the cluster settings, then we apply all the APM-agent related settings, not just the - // ones with default values. Although there is some redundancy here, it only happens at startup - // for a very small number of settings. - verify(apmAgentSettings, times(2)).setAgentSetting("transaction_sample_rate", "0.75"); + verify(apmAgentSettings).setAgentSetting("recording", "true"); + verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); } - /** - * Check that when cluster settings are synchronised with the system properties, agent settings other - * than those with default values are set. 
- */ - public void test_whenTracerCreated_clusterSettingsAlsoApplied() { + public void testSetAgentsSettingsWithLegacyPrefix() { APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); Settings settings = Settings.builder() - .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true) - .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true") + .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .put("tracing.apm.agent.span_compression_enabled", "true") .build(); apmAgentSettings.syncAgentSystemProperties(settings); + verify(apmAgentSettings).setAgentSetting("recording", "true"); verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); } + + /** + * Check that invalid or forbidden APM agent settings are rejected. + */ + public void testRejectForbiddenOrUnknownAgentSettings() { + List prefixes = List.of(APMAgentSettings.APM_AGENT_SETTINGS.getKey(), "tracing.apm.agent."); + for (String prefix : prefixes) { + Settings settings = Settings.builder().put(prefix + "unknown", "true").build(); + Exception exception = expectThrows( + IllegalArgumentException.class, + () -> APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings) + ); + assertThat(exception.getMessage(), containsString("[" + prefix + "unknown]")); + } + // though, accept / ignore nested global_labels + for (String prefix : prefixes) { + Settings settings = Settings.builder().put(prefix + "global_labels." 
+ randomAlphaOfLength(5), "123").build(); + APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings); + } + } + + public void testTelemetryTracingNamesIncludeFallback() { + Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING.getKey(), "abc,xyz").build(); + + List included = APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.get(settings); + + assertThat(included, containsInAnyOrder("abc", "xyz")); + } + + public void testTelemetryTracingNamesExcludeFallback() { + Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING.getKey(), "abc,xyz").build(); + + List included = APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.get(settings); + + assertThat(included, containsInAnyOrder("abc", "xyz")); + } + + public void testTelemetryTracingSanitizeFieldNamesFallback() { + Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES.getKey(), "abc,xyz").build(); + + List included = APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(settings); + + assertThat(included, containsInAnyOrder("abc", "xyz")); + } + + public void testTelemetryTracingSanitizeFieldNamesFallbackDefault() { + List included = APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(Settings.EMPTY); + assertThat(included, hasItem("password")); // and more defaults + } + + public void testTelemetrySecretTokenFallback() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING.getKey(), "verysecret"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + + try (SecureString secureString = APMAgentSettings.TELEMETRY_SECRET_TOKEN_SETTING.get(settings)) { + assertEquals("verysecret", secureString.toString()); + + } + } + + public void testTelemetryApiKeyFallback() { + MockSecureSettings secureSettings = new MockSecureSettings(); + 
secureSettings.setString(APMAgentSettings.TRACING_APM_API_KEY_SETTING.getKey(), "abc"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + + try (SecureString secureString = APMAgentSettings.TELEMETRY_API_KEY_SETTING.get(settings)) { + assertEquals("abc", secureString.toString()); + + } + } } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java index 64f78d0af494c..9a5479cc65a93 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java @@ -78,6 +78,13 @@ public void testLastElementAllowList() { expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.somemodule.somemetric.some_other_suffix")); } + public void testSkipValidationDueToBWC() { + for (String partOfMetricName : MetricNameValidator.SKIP_VALIDATION_METRIC_NAMES_DUE_TO_BWC) { + MetricNameValidator.validate("es.threadpool." 
+ partOfMetricName + ".total");// fake metric name, but with the part that skips + // validation + } + } + public static String metricNameWithLength(int length) { int prefixAndSuffix = "es.".length() + ".utilization".length(); assert length > prefixAndSuffix : "length too short"; @@ -99,4 +106,5 @@ public static String metricNameWithLength(int length) { metricName.append("utilization"); return metricName.toString(); } + } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java index 04a4e1b3f3a34..789b9294e2a6e 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java @@ -53,7 +53,7 @@ public class APMTracerTests extends ESTestCase { * Check that the tracer doesn't create spans when tracing is disabled. 
*/ public void test_onTraceStarted_withTracingDisabled_doesNotStartTrace() { - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), false).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build(); APMTracer apmTracer = buildTracer(settings); apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); @@ -66,8 +66,8 @@ public void test_onTraceStarted_withTracingDisabled_doesNotStartTrace() { */ public void test_onTraceStarted_withSpanNameOmitted_doesNotStartTrace() { Settings settings = Settings.builder() - .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true) - .putList(APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), List.of("filtered*")) + .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .putList(APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.getKey(), List.of("filtered*")) .build(); APMTracer apmTracer = buildTracer(settings); @@ -80,7 +80,7 @@ public void test_onTraceStarted_withSpanNameOmitted_doesNotStartTrace() { * Check that when a trace is started, the tracer starts a span and records it. */ public void test_onTraceStarted_startsTrace() { - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); @@ -93,7 +93,7 @@ public void test_onTraceStarted_startsTrace() { * Checks that when a trace is started with a specific start time, the tracer starts a span and records it. 
*/ public void test_onTraceStartedWithStartTime_startsTrace() { - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); ThreadContext threadContext = new ThreadContext(settings); @@ -111,7 +111,7 @@ public void test_onTraceStartedWithStartTime_startsTrace() { * Check that when a trace is stopped, the tracer ends the span and removes the record of it. */ public void test_onTraceStopped_stopsTrace() { - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); @@ -128,7 +128,7 @@ public void test_onTraceStopped_stopsTrace() { * check that the local context object is added, however. 
*/ public void test_whenTraceStarted_threadContextIsPopulated() { - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); ThreadContext threadContext = new ThreadContext(settings); @@ -148,8 +148,8 @@ public void test_whenTraceStarted_andSpanNameIncluded_thenSpanIsStarted() { "name-b*" ); Settings settings = Settings.builder() - .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true) - .putList(APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), includePatterns) + .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .putList(APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.getKey(), includePatterns) .build(); APMTracer apmTracer = buildTracer(settings); @@ -170,9 +170,9 @@ public void test_whenTraceStarted_andSpanNameIncludedAndExcluded_thenSpanIsNotSt final List includePatterns = List.of("name-a*"); final List excludePatterns = List.of("name-a*"); Settings settings = Settings.builder() - .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true) - .putList(APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), includePatterns) - .putList(APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING.getKey(), excludePatterns) + .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .putList(APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.getKey(), includePatterns) + .putList(APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.getKey(), excludePatterns) .build(); APMTracer apmTracer = buildTracer(settings); @@ -193,8 +193,8 @@ public void test_whenTraceStarted_andSpanNameExcluded_thenSpanIsNotStarted() { "name-b*" ); Settings settings = Settings.builder() - .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true) - .putList(APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING.getKey(), 
excludePatterns) + .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .putList(APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.getKey(), excludePatterns) .build(); APMTracer apmTracer = buildTracer(settings); @@ -211,7 +211,7 @@ public void test_whenTraceStarted_andSpanNameExcluded_thenSpanIsNotStarted() { * Check that sensitive attributes are not added verbatim to a span, but instead the value is redacted. */ public void test_whenAddingAttributes_thenSensitiveValuesAreRedacted() { - Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), false).build(); + Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build(); APMTracer apmTracer = buildTracer(settings); CharacterRunAutomaton labelFilterAutomaton = apmTracer.getLabelFilterAutomaton(); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index cf974abf4fda9..34f1701a595de 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -11,9 +11,9 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -1831,16 +1831,11 @@ public void onFailure(Exception e) { 
assertThat(indicesStatsResponse.getIndices().size(), equalTo(2)); } - private static void verifyResolvability(String dataStream, ActionRequestBuilder requestBuilder, boolean fail) { + private static void verifyResolvability(String dataStream, RequestBuilder requestBuilder, boolean fail) { verifyResolvability(dataStream, requestBuilder, fail, 0); } - private static void verifyResolvability( - String dataStream, - ActionRequestBuilder requestBuilder, - boolean fail, - long expectedCount - ) { + private static void verifyResolvability(String dataStream, RequestBuilder requestBuilder, boolean fail, long expectedCount) { if (fail) { String expectedErrorMessage = "no such index [" + dataStream + "]"; if (requestBuilder instanceof MultiSearchRequestBuilder) { diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index f34bb96b3eb81..9880e5e9914a8 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; @@ -30,6 +29,7 @@ import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; 
import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.coordination.StableMasterHealthIndicatorService; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; @@ -313,7 +313,7 @@ public void testAutomaticForceMerge() throws Exception { for (int i = 0; i < randomIntBetween(10, 50); i++) { indexDocs(dataStreamName, randomIntBetween(1, 300)); // Make sure the segments get written: - FlushResponse flushResponse = indicesAdmin().flush(new FlushRequest(toBeRolledOverIndex)).actionGet(); + BroadcastResponse flushResponse = indicesAdmin().flush(new FlushRequest(toBeRolledOverIndex)).actionGet(); assertThat(flushResponse.getStatus(), equalTo(RestStatus.OK)); } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AutoCreateDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AutoCreateDataStreamIT.java index 5ea366784c66a..4ffdc128f8ec0 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AutoCreateDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AutoCreateDataStreamIT.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.datastreams; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -23,6 +24,7 @@ import static java.nio.charset.StandardCharsets.UTF_8; import static org.hamcrest.Matchers.containsString; +@SuppressWarnings("resource") public class AutoCreateDataStreamIT extends DisabledSecurityDataStreamTestCase { /** @@ -31,7 +33,7 @@ public class AutoCreateDataStreamIT extends DisabledSecurityDataStreamTestCase { */ public void testCanAutoCreateDataStreamWhenAutoCreateIndexDisabled() throws IOException { 
configureAutoCreateIndex(false); - createTemplateWithAllowAutoCreate(null); + createTemplate(null, true); assertOK(this.indexDocument()); } @@ -40,7 +42,7 @@ public void testCanAutoCreateDataStreamWhenAutoCreateIndexDisabled() throws IOEx * and that template has allow_auto_create set to true. */ public void testCanAutoCreateDataStreamWhenExplicitlyAllowedByTemplate() throws IOException { - createTemplateWithAllowAutoCreate(true); + createTemplate(true, true); // Attempt to add a document to a non-existing index. Auto-creating the index should succeed because the index name // matches the template pattern @@ -52,15 +54,60 @@ public void testCanAutoCreateDataStreamWhenExplicitlyAllowedByTemplate() throws * allow_auto_create explicitly to false. */ public void testCannotAutoCreateDataStreamWhenDisallowedByTemplate() throws IOException { - createTemplateWithAllowAutoCreate(false); + createTemplate(false, true); - // Attempt to add a document to a non-existing index. Auto-creating the index should succeed because the index name - // matches the template pattern + // Auto-creating the index should fail when the template disallows that final ResponseException responseException = expectThrows(ResponseException.class, this::indexDocument); assertThat( Streams.copyToString(new InputStreamReader(responseException.getResponse().getEntity().getContent(), UTF_8)), - containsString("no such index [composable template [recipe*] forbids index auto creation]") + containsString("no such index [recipe_kr] and composable template [recipe*] forbids index auto creation") + ); + } + + /** + * Check that if require_data_stream is set to true, automatically creating an index is allowed only + * if its name matches an index template AND it contains a data-stream template + */ + public void testCannotAutoCreateDataStreamWhenNoDataStreamTemplateMatch() throws IOException { + createTemplate(true, true); + + final Request request = prepareIndexRequest("ingredients_kr"); + 
request.addParameter(DocWriteRequest.REQUIRE_DATA_STREAM, Boolean.TRUE.toString()); + + // Attempt to add a document to a non-existing index. Auto-creating the index should fail because the index name doesn't + // match the template pattern and the request requires a data stream template + final ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); + + assertThat( + Streams.copyToString(new InputStreamReader(responseException.getResponse().getEntity().getContent(), UTF_8)), + containsString( + "no such index [ingredients_kr] and the index creation request requires a data stream, " + + "but no matching index template with data stream template was found for it" + ) + ); + } + + /** + * Check that if require_data_stream is set to true, automatically creating an index is allowed only + * if its name matches an index template AND it contains a data-stream template + */ + public void testCannotAutoCreateDataStreamWhenMatchingTemplateIsNotDataStream() throws IOException { + createTemplate(true, false); + + final Request request = prepareIndexRequest("recipe_kr"); + request.addParameter(DocWriteRequest.REQUIRE_DATA_STREAM, Boolean.TRUE.toString()); + + // Attempt to add a document to a non-existing index. 
Auto-creating the index should fail because the index name doesn't + // match the template pattern and the request requires a data stream template + final ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); + + assertThat( + Streams.copyToString(new InputStreamReader(responseException.getResponse().getEntity().getContent(), UTF_8)), + containsString( + "no such index [recipe_kr] and the index creation request requires a data stream, " + + "but no matching index template with data stream template was found for it" + ) ); } @@ -78,7 +125,7 @@ private void configureAutoCreateIndex(boolean value) throws IOException { assertOK(settingsResponse); } - private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws IOException { + private void createTemplate(Boolean allowAutoCreate, boolean addDataStreamTemplate) throws IOException { XContentBuilder b = JsonXContent.contentBuilder(); b.startObject(); { @@ -86,8 +133,10 @@ private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws I if (allowAutoCreate != null) { b.field("allow_auto_create", allowAutoCreate); } - b.startObject("data_stream"); - b.endObject(); + if (addDataStreamTemplate) { + b.startObject("data_stream"); + b.endObject(); + } } b.endObject(); @@ -98,8 +147,13 @@ private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws I } private Response indexDocument() throws IOException { - final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc"); - indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + Instant.now() + "\", \"name\": \"Kimchi\" }"); + final Request indexDocumentRequest = prepareIndexRequest("recipe_kr"); return client().performRequest(indexDocumentRequest); } + + private Request prepareIndexRequest(String indexName) { + final Request indexDocumentRequest = new Request("POST", indexName + "/_doc"); + indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + 
Instant.now() + "\", \"name\": \"Kimchi\" }"); + return indexDocumentRequest; + } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 519499addd77e..694e015b602f8 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -31,7 +31,7 @@ import java.io.UncheckedIOException; import java.time.Instant; import java.util.ArrayList; -import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -177,14 +177,18 @@ private List findRoutingPaths(String indexName, Settings allSettings, Li } MappingParserContext parserContext = mapperService.parserContext(); - for (String pathMatch : template.pathMatch()) { + for (Iterator iterator = template.pathMatch().iterator(); iterator.hasNext();) { var mapper = parserContext.typeParser(mappingSnippetType) - // Since FieldMapper.parse modifies the Map passed in (removing entries for "type"), that means - // that only the first pathMatch passed in gets recognized as a time_series_dimension. To counteract - // that, we wrap the mappingSnippet in a new HashMap for each pathMatch instance. - .parse(pathMatch, new HashMap<>(mappingSnippet), parserContext) + .parse(iterator.next(), mappingSnippet, parserContext) .build(MapperBuilderContext.root(false, false)); extractPath(routingPaths, mapper); + if (iterator.hasNext()) { + // Since FieldMapper.parse modifies the Map passed in (removing entries for "type"), that means + // that only the first pathMatch passed in gets recognized as a time_series_dimension. + // To avoid this, each parsing call uses a new mapping snippet. + // Note that a shallow copy of the mappingSnippet map is not enough if there are multi-fields. 
+ mappingSnippet = template.mappingForName(templateName, KeywordFieldMapper.CONTENT_TYPE); + } } } return routingPaths; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index 63920ed73bf4a..b69ea170eb476 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -21,6 +21,7 @@ import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -230,6 +231,7 @@ public Collection createComponents(PluginServices services) { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java index e756ba32ec699..6e7528c470d49 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java @@ -155,6 +155,7 @@ static ClusterState removeDataStream( DataStream dataStream = currentState.metadata().dataStreams().get(dataStreamName); assert dataStream != null; 
backingIndicesToRemove.addAll(dataStream.getIndices()); + backingIndicesToRemove.addAll(dataStream.getFailureIndices()); } // first delete the data streams and then the indices: diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index eff40cb1dbe62..8b15d6a4b7bdf 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; @@ -33,6 +32,7 @@ import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -1168,7 +1168,7 @@ private void forceMergeIndex(ForceMergeRequest forceMergeRequest, ActionListener logger.info("Data stream lifecycle is issuing a request to force merge index [{}]", targetIndex); client.admin().indices().forceMerge(forceMergeRequest, new ActionListener<>() { @Override - public void 
onResponse(ForceMergeResponse forceMergeResponse) { + public void onResponse(BroadcastResponse forceMergeResponse) { if (forceMergeResponse.getFailedShards() > 0) { DefaultShardOperationFailedException[] failures = forceMergeResponse.getShardFailures(); String message = Strings.format( diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java index 2b23b76670af2..3fe9ae0758a91 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.datastreams.lifecycle.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -26,7 +25,7 @@ */ public class DeleteDataStreamLifecycleAction { - public static final ActionType INSTANCE = ActionType.localOnly("indices:admin/data_stream/lifecycle/delete"); + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/lifecycle/delete"); private DeleteDataStreamLifecycleAction() {/* no instances */} @@ -56,11 +55,6 @@ public String[] getNames() { return names; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public String[] indices() { return names; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java index 9e13bd7e0a99b..676052f76d564 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java @@ -36,8 +36,7 @@ public class ExplainDataStreamLifecycleAction { public static final ActionType INSTANCE = new ActionType<>( - "indices:admin/data_stream/lifecycle/explain", - Response::new + "indices:admin/data_stream/lifecycle/explain" ); private ExplainDataStreamLifecycleAction() {/* no instances */} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java index d2e24479df347..8149e1a0df443 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java @@ -37,8 +37,7 @@ public class GetDataStreamLifecycleAction { public static final ActionType INSTANCE = new ActionType<>( - "indices:admin/data_stream/lifecycle/get", - Response::new + "indices:admin/data_stream/lifecycle/get" ); private GetDataStreamLifecycleAction() {/* no instances */} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java index c3444a67b847c..a30af402a9186 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java @@ -34,7 +34,7 @@ public class GetDataStreamLifecycleStatsAction 
extends ActionType { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java index 1a1af76315cc5..c40988f1de6c7 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.datastreams.lifecycle.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -40,7 +39,7 @@ */ public class PutDataStreamLifecycleAction { - public static final ActionType INSTANCE = ActionType.localOnly("indices:admin/data_stream/lifecycle/put"); + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/lifecycle/put"); private PutDataStreamLifecycleAction() {/* no instances */} @@ -129,11 +128,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public String[] indices() { return names; diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index 62d07467d5086..db0e3e5cd6258 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -493,6 +493,55 @@ public void 
testGenerateRoutingPathFromDynamicTemplateWithMultiplePathMatchEntri assertEquals(3, routingPathList.size()); } + public void testGenerateRoutingPathFromDynamicTemplateWithMultiplePathMatchEntriesMultiFields() throws Exception { + Instant now = Instant.now().truncatedTo(ChronoUnit.SECONDS); + String mapping = """ + { + "_doc": { + "dynamic_templates": [ + { + "labels": { + "path_match": ["xprometheus.labels.*", "yprometheus.labels.*"], + "mapping": { + "type": "keyword", + "time_series_dimension": true, + "fields": { + "text": { + "type": "text" + } + } + } + } + } + ], + "properties": { + "host": { + "properties": { + "id": { + "type": "keyword", + "time_series_dimension": true + } + } + }, + "another_field": { + "type": "keyword" + } + } + } + } + """; + Settings result = generateTsdbSettings(mapping, now); + assertThat(result.size(), equalTo(3)); + assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); + assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); + assertThat( + IndexMetadata.INDEX_ROUTING_PATH.get(result), + containsInAnyOrder("host.id", "xprometheus.labels.*", "yprometheus.labels.*") + ); + List routingPathList = IndexMetadata.INDEX_ROUTING_PATH.get(result); + assertEquals(3, routingPathList.size()); + } + public void testGenerateRoutingPathFromDynamicTemplate_templateWithNoPathMatch() throws Exception { Instant now = Instant.now().truncatedTo(ChronoUnit.SECONDS); String mapping = """ diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java index 29c88b7f75463..a5c3b348b1f1b 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java +++ 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.test.ESTestCase; +import org.junit.Assume; import java.util.Collections; import java.util.List; @@ -55,6 +56,30 @@ public void testDeleteDataStream() { } } + public void testDeleteDataStreamWithFailureStore() { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + final String dataStreamName = "my-data-stream"; + final List otherIndices = randomSubsetOf(List.of("foo", "bar", "baz")); + + ClusterState cs = DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(new Tuple<>(dataStreamName, 2)), + otherIndices, + System.currentTimeMillis(), + Settings.EMPTY, + 1, + false, + true + ); + DeleteDataStreamAction.Request req = new DeleteDataStreamAction.Request(new String[] { dataStreamName }); + ClusterState newState = DeleteDataStreamTransportAction.removeDataStream(iner, cs, req, validator, Settings.EMPTY); + assertThat(newState.metadata().dataStreams().size(), equalTo(0)); + assertThat(newState.metadata().indices().size(), equalTo(otherIndices.size())); + for (String indexName : otherIndices) { + assertThat(newState.metadata().indices().get(indexName).getIndex().getName(), equalTo(indexName)); + } + } + public void testDeleteMultipleDataStreams() { String[] dataStreamNames = { "foo", "bar", "baz", "eggplant" }; ClusterState cs = DataStreamTestHelper.getClusterStateWithDataStreams( diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java index c24d386dcb26e..637fb44affb6f 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java +++ 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java @@ -215,6 +215,7 @@ public void testGetTimeSeriesMixedDataStream() { instant.toEpochMilli(), Settings.EMPTY, 0, + false, false ); DataStreamTestHelper.getClusterStateWithDataStream(mBuilder, dataStream1, List.of(new Tuple<>(twoHoursAgo, twoHoursAhead))); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index befa16573de23..15f526d0a06d6 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.rollover.MaxAgeCondition; import org.elasticsearch.action.admin.indices.rollover.RolloverConditions; @@ -27,6 +26,7 @@ import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -578,7 +578,7 @@ public void testForceMerge() throws Exception { // We want this test method to get fake force merge responses, because this is what triggers a cluster 
state update clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 5, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 5, 0, List.of())); } }; String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); @@ -748,7 +748,7 @@ public void testForceMergeRetries() throws Exception { clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { listener.onResponse( - new ForceMergeResponse( + new BroadcastResponse( 5, 5, 1, @@ -779,7 +779,7 @@ public void testForceMergeRetries() throws Exception { AtomicInteger forceMergeFailedCount = new AtomicInteger(0); clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 4, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 4, 0, List.of())); forceMergeFailedCount.incrementAndGet(); } }; @@ -800,7 +800,7 @@ public void testForceMergeRetries() throws Exception { // For the final data stream lifecycle run, we let forcemerge run normally clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 5, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 5, 0, List.of())); } }; dataStreamLifecycleService.run(clusterService.state()); @@ -900,7 +900,7 @@ public void testForceMergeDedup() throws Exception { setState(clusterService, state); clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 5, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 5, 0, List.of())); } }; for (int i = 0; i < 100; i++) { diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml 
b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 6496930764ab8..22b541425b74f 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -303,6 +303,11 @@ setup: name: failure-data-stream2 - is_true: acknowledged + - do: + indices.delete_index_template: + name: my-template4 + - is_true: acknowledged + --- "Create data stream with invalid name": - skip: @@ -530,6 +535,82 @@ setup: indices.get: index: $idx0name +--- +"Delete data stream with failure stores": + - skip: + # version: " - 8.11.99" + # reason: "data streams only supported in 8.12+" + version: all + reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/104348 + + - do: + allowed_warnings: + - "index template [my-template4] has index patterns [failure-data-stream1, failure-data-stream2] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template4] will take precedence during new index creation" + indices.put_index_template: + name: my-template4 + body: + index_patterns: [ failure-data-stream1 ] + data_stream: + failure_store: true + + - do: + indices.create_data_stream: + name: failure-data-stream1 + - is_true: acknowledged + + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + + # save the backing index names for later use + - do: + indices.get_data_stream: + name: failure-data-stream1 + + - set: { data_streams.0.indices.0.index_name: idx0name } + - set: { data_streams.0.failure_indices.0.index_name: fs0name } + + - do: + indices.get: + index: ['.ds-failure-data-stream1-*000001', 'test_index'] + + - is_true: test_index.settings + - is_true: .$idx0name.settings + + - do: + indices.get_data_stream: {} + - match: { data_streams.0.name: failure-data-stream1 } + - match: { 
data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 1 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + indices.delete_data_stream: + name: failure-data-stream1 + - is_true: acknowledged + + - do: + catch: missing + indices.get: + index: $idx0name + + - do: + catch: missing + indices.get: + index: $fs0name + + - do: + indices.delete_index_template: + name: my-template4 + - is_true: acknowledged + --- "Delete data stream missing behaviour": - skip: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml index f036ab549a94f..70c563d1d4510 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml @@ -260,3 +260,50 @@ - match: {test2.aliases: {}} - match: {test3.aliases: {}} +--- +"Test get alias with non-matching data streams": + - skip: + version: " - 8.12.1" + reason: "bugfix fixed from 8.12.1 and later" + features: allowed_warnings + + - do: + allowed_warnings: + - "index template [my-template] has index patterns [ds-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [ ds-* ] + template: + settings: + index.number_of_replicas: 0 + data_stream: { } + + - do: + indices.create_data_stream: + name: ds-first + - 
is_true: acknowledged + + - do: + indices.create_data_stream: + name: ds-second + - is_true: acknowledged + + - do: + indices.update_aliases: + body: + actions: + - add: + index: ds-first + alias: my-alias + - is_true: acknowledged + + - do: + indices.get_alias: + name: my-al* + - match: {ds-first.aliases.my-alias: {}} + + - do: + indices.get_alias: + name: this-does-not-exist* + - is_false: ds-first.aliases.my-alias diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_require_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_require_data_stream.yml new file mode 100644 index 0000000000000..7aed1cbe0a636 --- /dev/null +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_require_data_stream.yml @@ -0,0 +1,140 @@ +--- +"Testing require_data_stream in index creation": + - skip: + version: " - 8.12.99" + reason: "require_data_stream was introduced in 8.13.0" + features: allowed_warnings + + - do: + allowed_warnings: + - "index template [ds-template] has index patterns [ds-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [ds-template] will take precedence during new index creation" + indices.put_index_template: + name: ds-template + body: + index_patterns: ds-* + template: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + field: + type: keyword + data_stream: {} + allow_auto_create: true + + - do: + index: + index: ds-test + require_data_stream: true + body: + '@timestamp': '2022-12-12' + foo: bar + + - do: + catch: /no matching index template with data stream template was found for it/ + index: + index: index-test + require_data_stream: true + body: + '@timestamp': '2022-12-12' + foo: bar + + - do: + index: + index: other-index + require_data_stream: false + body: + '@timestamp': '2022-12-12' + foo: bar + + - do: + catch: /is not a data stream/ + 
index: + index: other-index + require_data_stream: true + body: + '@timestamp': '2022-12-12' + foo: bar + +--- +"Testing require_data_stream in bulk requests": + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/104774" + #version: " - 8.12.99" + #reason: "require_data_stream was introduced in 8.13.0" + features: allowed_warnings + + - do: + allowed_warnings: + - "index template [ds-template] has index patterns [ds-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [ds-template] will take precedence during new index creation" + indices.put_index_template: + name: ds-template + body: + index_patterns: ds-* + template: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + field: + type: keyword + data_stream: {} + allow_auto_create: true + + - do: + bulk: + refresh: true + require_data_stream: true + body: + - index: + _index: new_index_not_created + - f: 1 + - index: + _index: new_index_created + require_data_stream: false + - f: 2 + - index: + _index: ds-other + op_type: create + - "@timestamp": "2024-01-01" + - match: { errors: true } + - match: { items.0.index.status: 404 } + - match: { items.0.index.error.type: index_not_found_exception } + - match: { items.0.index.error.reason: "no such index [new_index_not_created] and the index creation request requires a data stream, but no matching index template with data stream template was found for it" } + - match: { items.1.index.result: created } + - match: { items.2.create.result: created } + + - do: + allowed_warnings: + - "index template [other-template] has index patterns [ds-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [other-template] will take precedence during new index creation" + indices.put_index_template: + name: other-template + body: + index_patterns: other-* + template: + settings: + number_of_shards: 1 + 
number_of_replicas: 0 + mappings: + properties: + field: + type: keyword + allow_auto_create: true + + - do: + bulk: + refresh: true + require_data_stream: false + body: + - index: + _index: other-myindex + require_data_stream: true + op_type: create + - "@timestamp": "2024-01-01" + - match: { errors: true } + - match: { items.0.create.status: 404 } + - match: { items.0.create.error.type: index_not_found_exception } + - match: { items.0.create.error.reason: "no such index [other-myindex] and the index creation request requires a data stream, but no matching index template with data stream template was found for it" } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 2fe666c5f208c..8add2b0d05535 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -42,10 +42,7 @@ public class GrokProcessorGetAction { - static final ActionType INSTANCE = new ActionType<>( - "cluster:admin/ingest/processor/grok/get", - Response::new - ); + static final ActionType INSTANCE = new ActionType<>("cluster:admin/ingest/processor/grok/get"); private GrokProcessorGetAction() {/* no instances */} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index d9e7488953b38..dff65f1c7a1bc 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import 
org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -83,6 +84,7 @@ public Map getProcessors(Processor.Parameters paramet @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java index d73eb1906ac5b..85affa225b882 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java @@ -23,22 +23,24 @@ public final class RenameProcessor extends AbstractProcessor { public static final String TYPE = "rename"; - private final TemplateScript.Factory field; private final TemplateScript.Factory targetField; private final boolean ignoreMissing; + private final boolean overrideEnabled; RenameProcessor( String tag, String description, TemplateScript.Factory field, TemplateScript.Factory targetField, - boolean ignoreMissing + boolean ignoreMissing, + boolean overrideEnabled ) { super(tag, description); this.field = field; this.targetField = targetField; this.ignoreMissing = ignoreMissing; + this.overrideEnabled = overrideEnabled; } TemplateScript.Factory getField() { @@ -53,6 +55,10 @@ boolean isIgnoreMissing() { return ignoreMissing; } + public boolean isOverrideEnabled() { + return overrideEnabled; + } + @Override public IngestDocument execute(IngestDocument document) { String path = document.renderTemplate(field); @@ -63,12 +69,13 @@ public IngestDocument 
execute(IngestDocument document) { throw new IllegalArgumentException("field [" + path + "] doesn't exist"); } } + // We fail here if the target field point to an array slot that is out of range. // If we didn't do this then we would fail if we set the value in the target_field // and then on failure processors would not see that value we tried to rename as we already // removed it. String target = document.renderTemplate(targetField); - if (document.hasField(target, true)) { + if (document.hasField(target, true) && overrideEnabled == false) { throw new IllegalArgumentException("field [" + target + "] already exists"); } @@ -115,7 +122,8 @@ public RenameProcessor create( scriptService ); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new RenameProcessor(processorTag, description, fieldTemplate, targetFieldTemplate, ignoreMissing); + boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override", false); + return new RenameProcessor(processorTag, description, fieldTemplate, targetFieldTemplate, ignoreMissing, overrideEnabled); } } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java index 3e3c7af964861..1cd0a0ead8785 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java @@ -83,6 +83,7 @@ public void testEscapeFields_valueField() throws Exception { null, new TestTemplateService.MockTemplateScript.Factory("foo"), new TestTemplateService.MockTemplateScript.Factory("foo.bar"), + false, false ); processor.execute(document); diff --git 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java index 2299081eb22cd..005cbc260f69a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java @@ -37,6 +37,7 @@ public void testCreate() throws Exception { assertThat(renameProcessor.getField().newInstance(Map.of()).execute(), equalTo("old_field")); assertThat(renameProcessor.getTargetField().newInstance(Map.of()).execute(), equalTo("new_field")); assertThat(renameProcessor.isIgnoreMissing(), equalTo(false)); + assertThat(renameProcessor.isOverrideEnabled(), equalTo(false)); } public void testCreateWithIgnoreMissing() throws Exception { @@ -52,6 +53,19 @@ public void testCreateWithIgnoreMissing() throws Exception { assertThat(renameProcessor.isIgnoreMissing(), equalTo(true)); } + public void testCreateWithEnableOverride() throws Exception { + Map config = new HashMap<>(); + config.put("field", "old_field"); + config.put("target_field", "new_field"); + config.put("override", true); + String processorTag = randomAlphaOfLength(10); + RenameProcessor renameProcessor = factory.create(null, processorTag, null, config); + assertThat(renameProcessor.getTag(), equalTo(processorTag)); + assertThat(renameProcessor.getField().newInstance(Map.of()).execute(), equalTo("old_field")); + assertThat(renameProcessor.getTargetField().newInstance(Map.of()).execute(), equalTo("new_field")); + assertThat(renameProcessor.isOverrideEnabled(), equalTo(true)); + } + public void testCreateNoFieldPresent() throws Exception { Map config = new HashMap<>(); config.put("target_field", "new_field"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index f472e9d9bacd4..9765320ef4d57 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -36,7 +36,7 @@ public void testRename() throws Exception { do { newFieldName = RandomDocumentPicks.randomFieldName(random()); } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFieldName.equals(fieldName)); - Processor processor = createRenameProcessor(fieldName, newFieldName, false); + Processor processor = createRenameProcessor(fieldName, newFieldName, false, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), equalTo(fieldValue)); } @@ -54,7 +54,7 @@ public void testRenameArrayElement() throws Exception { document.put("one", one); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = createRenameProcessor("list.0", "item", false); + Processor processor = createRenameProcessor("list.0", "item", false, false); processor.execute(ingestDocument); Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); assertThat(actualObject, instanceOf(List.class)); @@ -67,7 +67,7 @@ public void testRenameArrayElement() throws Exception { assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); - processor = createRenameProcessor("list.0", "list.3", false); + processor = createRenameProcessor("list.0", "list.3", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -82,7 +82,7 @@ public void testRenameArrayElement() throws Exception { public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new 
HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), false); + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -95,11 +95,11 @@ public void testRenameNonExistingFieldWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), true); + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), true, false); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); - Processor processor1 = createRenameProcessor("", RandomDocumentPicks.randomFieldName(random()), true); + Processor processor1 = createRenameProcessor("", RandomDocumentPicks.randomFieldName(random()), true, false); processor1.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -110,6 +110,7 @@ public void testRenameNewFieldAlreadyExists() throws Exception { Processor processor = createRenameProcessor( RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName, + false, false ); try { @@ -125,7 +126,7 @@ public void testRenameExistingFieldNullValue() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, (Object) null); String newFieldName = randomValueOtherThanMany(ingestDocument::hasField, () -> RandomDocumentPicks.randomFieldName(random())); - Processor processor = 
createRenameProcessor(fieldName, newFieldName, false); + Processor processor = createRenameProcessor(fieldName, newFieldName, false, false); processor.execute(ingestDocument); if (newFieldName.startsWith(fieldName + '.')) { assertThat(ingestDocument.getFieldValue(fieldName, Object.class), instanceOf(Map.class)); @@ -148,7 +149,7 @@ public void testRenameAtomicOperationSetFails() throws Exception { } }), "list", new Metadata.FieldProperty<>(Object.class, true, true, null)) ); - Processor processor = createRenameProcessor("list", "new_field", false); + Processor processor = createRenameProcessor("list", "new_field", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -167,7 +168,7 @@ public void testRenameAtomicOperationRemoveFails() throws Exception { metadata, Map.of("list", new Metadata.FieldProperty<>(Object.class, false, true, null)) ); - Processor processor = createRenameProcessor("list", "new_field", false); + Processor processor = createRenameProcessor("list", "new_field", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -182,30 +183,41 @@ public void testRenameLeafIntoBranch() throws Exception { Map source = new HashMap<>(); source.put("foo", "bar"); IngestDocument ingestDocument = TestIngestDocument.withDefaultVersion(source); - Processor processor1 = createRenameProcessor("foo", "foo.bar", false); + Processor processor1 = createRenameProcessor("foo", "foo.bar", false, false); processor1.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Map.of("bar", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar", String.class), equalTo("bar")); - Processor processor2 = createRenameProcessor("foo.bar", "foo.bar.baz", false); + Processor processor2 = createRenameProcessor("foo.bar", "foo.bar.baz", false, false); processor2.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), 
equalTo(Map.of("bar", Map.of("baz", "bar")))); assertThat(ingestDocument.getFieldValue("foo.bar", Map.class), equalTo(Map.of("baz", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar.baz", String.class), equalTo("bar")); - // for fun lets try to restore it (which don't allow today) - Processor processor3 = createRenameProcessor("foo.bar.baz", "foo", false); + // try to restore it (will fail, not allowed without the override flag) + Processor processor3 = createRenameProcessor("foo.bar.baz", "foo", false, false); Exception e = expectThrows(IllegalArgumentException.class, () -> processor3.execute(ingestDocument)); assertThat(e.getMessage(), equalTo("field [foo] already exists")); } - private RenameProcessor createRenameProcessor(String field, String targetField, boolean ignoreMissing) { + public void testRenameOverride() throws Exception { + Map source = new HashMap<>(); + source.put("event.original", "existing_message"); + source.put("message", "new_message"); + IngestDocument ingestDocument = TestIngestDocument.withDefaultVersion(source); + Processor processor1 = createRenameProcessor("message", "event.original", false, true); + processor1.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("event.original", String.class), equalTo("new_message")); + } + + private RenameProcessor createRenameProcessor(String field, String targetField, boolean ignoreMissing, boolean overrideEnabled) { return new RenameProcessor( randomAlphaOfLength(10), null, new TestTemplateService.MockTemplateScript.Factory(field), new TestTemplateService.MockTemplateScript.Factory(targetField), - ignoreMissing + ignoreMissing, + overrideEnabled ); } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml index dcf201666dfeb..6f12087de7d5e 100644 --- 
a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -1,94 +1,185 @@ --- teardown: -- do: - ingest.delete_pipeline: - id: "my_pipeline" - ignore: 404 + - do: + indices.delete: + index: "test" + ignore_unavailable: true + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 --- "Test Drop Processor": -- do: - ingest.put_pipeline: - id: "my_pipeline" - body: > - { - "description" : "pipeline with drop", - "processors" : [ - { - "drop" : { - "if": "ctx.foo == 'bar'" + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description" : "pipeline with drop", + "processors" : [ + { + "drop" : { + "if": "ctx.foo == 'bar'" + } } - } - ] + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + pipeline: "my_pipeline" + body: { + foo: "bar" } -- match: { acknowledged: true } - -- do: - index: - index: test - id: "1" - pipeline: "my_pipeline" - body: { - foo: "bar" - } - -- do: - index: - index: test - id: "2" - pipeline: "my_pipeline" - body: { - foo: "blub" - } - -- do: - catch: missing - get: - index: test - id: "1" -- match: { found: false } - -- do: - get: - index: test - id: "2" -- match: { _source.foo: "blub" } + + - do: + index: + index: test + id: "2" + pipeline: "my_pipeline" + body: { + foo: "blub" + } + + - do: + catch: missing + get: + index: test + id: "1" + - match: { found: false } + + - do: + get: + index: test + id: "2" + - match: { _source.foo: "blub" } --- "Test Drop Processor On Failure": -- do: - ingest.put_pipeline: - id: "my_pipeline_with_failure" - body: > - { - "description" : "pipeline with on failure drop", - "processors": [ + - do: + ingest.put_pipeline: + id: "my_pipeline_with_failure" + body: > + { + "description" : "pipeline with on failure drop", + "processors": [ + { + "fail": { + "message": "failed", + "on_failure": [ + { + "drop": {} 
+ } + ] + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "3" + pipeline: "my_pipeline_with_failure" + body: { + foo: "bar" + } + + - do: + catch: missing + get: + index: test + id: "3" + +--- +"Test Drop Processor with Upsert (_bulk)": + - skip: + version: ' - 8.12.99' + reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.13.0' + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "processors": [ { - "fail": { - "message": "failed", - "on_failure": [ - { - "drop": {} - } - ] + "drop": { } } ] - } -- match: { acknowledged: true } - -- do: - index: - index: test - id: "3" - pipeline: "my_pipeline_with_failure" - body: { - foo: "bar" - } - -- do: - catch: missing - get: - index: test - id: "3" + } + - match: { acknowledged: true } + + - do: + bulk: + refresh: true + pipeline: "my_pipeline" + body: + - update: + _index: test + _id: 4 + - '{"upsert":{"some":"fields"},"script":"ctx"}' + - match: { errors: false } + - match: { items.0.update._index: test } + - match: { items.0.update._id: "4" } + - match: { items.0.update._version: -3 } + - match: { items.0.update.result: noop } + - match: { items.0.update.status: 200 } + + - do: + catch: missing + get: + index: test + id: "4" + +--- +"Test Drop Processor with Upsert (_update)": + - skip: + version: ' - 8.12.99' + reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.13.0' + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "processors": [ + { + "drop": { + } + } + ] + } + - match: { acknowledged: true } + + - do: + indices.create: + index: test + body: + settings: + index: + default_pipeline: "my_pipeline" + + - do: + update: + index: test + id: "5" + body: + script: + source: "ctx._source.foo = 'bar'" + upsert: + foo: "bar" + + - match: { _index: test } + - match: { _id: "5" } + - match: { result: noop } + - do: + catch: missing + get: + index: test + id: "5" diff --git 
a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml index 5e38f09dbd024..26a0d5eef50ae 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml @@ -5,12 +5,45 @@ teardown: id: "1" ignore: 404 +--- +"Test Rename Processor": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "rename" : { + "field" : "foo", + "target_field": "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + pipeline: "1" + body: { + foo: "test" + } + + - do: + get: + index: test + id: "1" + - match: { _source.bar: "test" } + --- "Test Rename Processor with template snippets and ignore_missing": - do: ingest.put_pipeline: id: "1" - body: > + body: > { "processors": [ { @@ -38,3 +71,40 @@ teardown: index: test id: "1" - match: { _source.message: "test" } + +--- +"Test Rename Processor with override": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "rename" : { + "field" : "message", + "target_field": "event.original", + "override": true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + pipeline: "1" + body: { + message: "overridden original message", + event: { + original: "original message" + } + } + + - do: + get: + index: test + id: "1" + - match: { _source.event.original: "overridden original message" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml index e2c331deae340..341adaa781ef0 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml +++ 
b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml @@ -1,5 +1,9 @@ --- teardown: + - do: + indices.delete: + index: "test" + ignore_unavailable: true - do: ingest.delete_pipeline: id: "my_pipeline" @@ -10,7 +14,7 @@ teardown: - do: ingest.put_pipeline: id: "my_pipeline" - body: > + body: > { "description": "_description", "processors": [ @@ -36,7 +40,7 @@ teardown: - do: ingest.put_pipeline: id: "my_pipeline" - body: > + body: > { "description": "_description", "processors": [ @@ -69,3 +73,87 @@ teardown: index: test id: "1" - match: { _source.error_message: "fail_processor_ran" } + +--- +"Test Fail Processor with Upsert (bulk)": + - skip: + version: ' - 8.12.99' + reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.13.0' + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "processors": [ + { + "fail": { + "message": "error-message" + } + } + ] + } + - match: { acknowledged: true } + + - do: + bulk: + refresh: true + pipeline: "my_pipeline" + body: + - update: + _index: test + _id: 3 + - '{"upsert":{"some":"fields"},"script":"ctx"}' + - match: { errors: true } + - match: { items.0.update._index: test } + - match: { items.0.update._id: "3" } + - match: { items.0.update.status: 500 } + - match: { items.0.update.error.type: fail_processor_exception } + - match: { items.0.update.error.reason: /error-message/ } + + - do: + catch: missing + get: + index: test + id: "3" + +--- +"Test Fail Processor with Upsert (_update)": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "processors": [ + { + "fail": { + "message": "error-message" + } + } + ] + } + - match: { acknowledged: true } + + - do: + indices.create: + index: test + body: + settings: + index: + default_pipeline: "my_pipeline" + + - do: + update: + index: test + id: "4" + body: + script: + source: "ctx._source.foo = 'bar'" + upsert: + foo: "bar" + catch: /error-message/ + + - do: + catch: missing + get: + index: test + id: 
"4" diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java index 30c0fcb74833c..ff75325624412 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java @@ -18,12 +18,26 @@ import java.util.function.Function; /** - * The in-memory cache for the geoip data. There should only be 1 instance of this class.. + * The in-memory cache for the geoip data. There should only be 1 instance of this class. * This cache differs from the maxmind's {@link NodeCache} such that this cache stores the deserialized Json objects to avoid the * cost of deserialization for each lookup (cached or not). This comes at slight expense of higher memory usage, but significant * reduction of CPU usage. */ final class GeoIpCache { + + /** + * Internal-only sentinel object for recording that a result from the geoip database was null (i.e. there was no result). By caching + * this no-result we can distinguish between something not being in the cache because we haven't searched for that data yet, versus + * something not being in the cache because the data doesn't exist in the database. + */ + // visible for testing + static final AbstractResponse NO_RESULT = new AbstractResponse() { + @Override + public String toString() { + return "AbstractResponse[NO_RESULT]"; + } + }; + private final Cache cache; // package private for testing @@ -40,18 +54,27 @@ T putIfAbsent( String databasePath, Function retrieveFunction ) { - // can't use cache.computeIfAbsent due to the elevated permissions for the jackson (run via the cache loader) CacheKey cacheKey = new CacheKey(ip, databasePath); // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. 
AbstractResponse response = cache.get(cacheKey); + + // populate the cache for this key, if necessary if (response == null) { response = retrieveFunction.apply(ip); - if (response != null) { - cache.put(cacheKey, response); + // if the response from the database was null, then use the no-result sentinel value + if (response == null) { + response = NO_RESULT; } + // store the result or no-result in the cache + cache.put(cacheKey, response); + } + + if (response == NO_RESULT) { + return null; // the no-result sentinel is an internal detail, don't expose it + } else { + return (T) response; } - return (T) response; } // only useful for testing diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 58d55a5a8e6fe..42dddf4c83ef3 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -172,10 +172,8 @@ private Map getGeoData(GeoIpDatabase geoIpDatabase, String ip) t geoData = retrieveCityGeoData(geoIpDatabase, ipAddress); } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { geoData = retrieveCountryGeoData(geoIpDatabase, ipAddress); - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { geoData = retrieveAsnGeoData(geoIpDatabase, ipAddress); - } else { throw new ElasticsearchParseException( "Unsupported database type [" + geoIpDatabase.getDatabaseType() + "]", diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 30ecc96a3171c..53c8db638923f 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -148,6 +148,7 @@ 
public List> getPersistentTasksExecutor( @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java index 9921b144afcac..f9b1d8c637f68 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java @@ -32,7 +32,7 @@ public class GeoIpDownloaderStatsAction { - public static final ActionType INSTANCE = new ActionType<>("cluster:monitor/ingest/geoip/stats", Response::new); + public static final ActionType INSTANCE = new ActionType<>("cluster:monitor/ingest/geoip/stats"); private GeoIpDownloaderStatsAction() {/* no instances */} diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java index c7dbee47ea823..cbb41dfa02c5f 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java @@ -318,7 +318,7 @@ private String mockSearches(String databaseName, int firstChunk, int lastChunk) Map> requestMap = new HashMap<>(); for (int i = firstChunk; i <= lastChunk; i++) { byte[] chunk = data.get(i - firstChunk); - SearchHit hit = new SearchHit(i); + SearchHit hit = SearchHit.unpooled(i); try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) { builder.map(Map.of("data", chunk)); builder.flush(); @@ -328,7 +328,7 @@ private 
String mockSearches(String databaseName, int firstChunk, int lastChunk) throw new UncheckedIOException(ex); } - SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f); + SearchHits hits = SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f); SearchResponse searchResponse = new SearchResponse(hits, null, null, false, null, null, 0, null, 1, 1, 0, 1L, null, null); toRelease.add(searchResponse::decRef); @SuppressWarnings("unchecked") diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java index 438353be737b9..d049ca3f9bcd0 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java @@ -13,6 +13,10 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.test.ESTestCase; +import java.net.InetAddress; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; + import static org.mockito.Mockito.mock; public class GeoIpCacheTests extends ESTestCase { @@ -36,6 +40,23 @@ public void testCachesAndEvictsResults() { assertNotSame(response1, cache.get(InetAddresses.forString("127.0.0.1"), "path/to/db")); } + public void testCachesNoResult() { + GeoIpCache cache = new GeoIpCache(1); + final AtomicInteger count = new AtomicInteger(0); + Function countAndReturnNull = (ip) -> { + count.incrementAndGet(); + return null; + }; + + AbstractResponse response = cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), "path/to/db", countAndReturnNull); + assertNull(response); + assertNull(cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), "path/to/db", countAndReturnNull)); + assertEquals(1, count.get()); + + // the cached value is not actually *null*, it's the NO_RESULT 
sentinel + assertSame(GeoIpCache.NO_RESULT, cache.get(InetAddresses.forString("127.0.0.1"), "path/to/db")); + } + public void testCacheKey() { GeoIpCache cache = new GeoIpCache(2); AbstractResponse response1 = mock(AbstractResponse.class); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java index 7fdce03252687..915d54c91b259 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java @@ -17,13 +17,12 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -35,6 +34,7 @@ import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; @@ -79,7 +79,7 @@ 
public class GeoIpDownloaderTests extends ESTestCase { public void setup() { httpClient = mock(HttpClient.class); clusterService = mock(ClusterService.class); - threadPool = new ThreadPool(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "test").build()); + threadPool = new ThreadPool(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "test").build(), MeterRegistry.NOOP); when(clusterService.getClusterSettings()).thenReturn( new ClusterSettings( Settings.EMPTY, @@ -178,28 +178,34 @@ public int read() throws IOException { } public void testIndexChunksNoData() throws IOException { - client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { + client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(FlushResponse.class)); - }); - client.addHandler(RefreshAction.INSTANCE, (RefreshRequest request, ActionListener flushResponseActionListener) -> { - assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(RefreshResponse.class)); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); }); + client.addHandler( + RefreshAction.INSTANCE, + (RefreshRequest request, ActionListener flushResponseActionListener) -> { + assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); + } + ); InputStream empty = new ByteArrayInputStream(new byte[0]); assertEquals(0, geoIpDownloader.indexChunks("test", empty, 0, "d41d8cd98f00b204e9800998ecf8427e", 0)); } public void testIndexChunksMd5Mismatch() { - client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { - assertArrayEquals(new String[] { 
GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(FlushResponse.class)); - }); - client.addHandler(RefreshAction.INSTANCE, (RefreshRequest request, ActionListener flushResponseActionListener) -> { + client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(RefreshResponse.class)); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); }); + client.addHandler( + RefreshAction.INSTANCE, + (RefreshRequest request, ActionListener flushResponseActionListener) -> { + assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); + } + ); IOException exception = expectThrows( IOException.class, @@ -232,14 +238,17 @@ public void testIndexChunks() throws IOException { assertEquals(chunk + 15, source.get("chunk")); listener.onResponse(mock(IndexResponse.class)); }); - client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { - assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(FlushResponse.class)); - }); - client.addHandler(RefreshAction.INSTANCE, (RefreshRequest request, ActionListener flushResponseActionListener) -> { + client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(RefreshResponse.class)); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); }); + client.addHandler( + RefreshAction.INSTANCE, + (RefreshRequest request, ActionListener flushResponseActionListener) -> { + 
assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); + } + ); InputStream big = new ByteArrayInputStream(bigArray); assertEquals(17, geoIpDownloader.indexChunks("test", big, 15, "a67563dfa8f3cba8b8cff61eb989a749", 0)); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index 9bdabcede8ec6..b867fcfb905ea 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -12,7 +12,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; @@ -26,7 +25,6 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Iterator; @@ -204,28 +202,6 @@ static final class Fields { static final String STATUS = "status"; } - public static MultiSearchTemplateResponse fromXContext(XContentParser parser) { - // The MultiSearchTemplateResponse is identical to the multi search response so we reuse the parsing logic in multi search response - MultiSearchResponse mSearchResponse = MultiSearchResponse.fromXContext(parser); - try { - org.elasticsearch.action.search.MultiSearchResponse.Item[] responses = mSearchResponse.getResponses(); - Item[] templateResponses = new Item[responses.length]; 
- int i = 0; - for (org.elasticsearch.action.search.MultiSearchResponse.Item item : responses) { - SearchTemplateResponse stResponse = null; - if (item.getResponse() != null) { - stResponse = new SearchTemplateResponse(); - stResponse.setResponse(item.getResponse()); - item.getResponse().incRef(); - } - templateResponses[i++] = new Item(stResponse, item.getFailure()); - } - return new MultiSearchTemplateResponse(templateResponses, mSearchResponse.getTook().millis()); - } finally { - mSearchResponse.decRef(); - } - } - @Override public String toString() { return Strings.toString(this); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index b9996484c5bc0..b7f5035122dfe 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -33,13 +34,9 @@ public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin, SearchPlugin { - public static final ActionType SEARCH_TEMPLATE_ACTION = new ActionType<>( - "indices:data/read/search/template", - SearchTemplateResponse::new - ); + public static final ActionType SEARCH_TEMPLATE_ACTION = new ActionType<>("indices:data/read/search/template"); public static final ActionType MULTI_SEARCH_TEMPLATE_ACTION = new ActionType<>( - "indices:data/read/msearch/template", - MultiSearchTemplateResponse::new + 
"indices:data/read/msearch/template" ); @Override @@ -58,6 +55,7 @@ public ScriptEngine getScriptEngine(Settings settings, Collection getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -66,7 +64,7 @@ public List getRestHandlers( Supplier nodesInCluster ) { return Arrays.asList( - new RestSearchTemplateAction(), + new RestSearchTemplateAction(namedWriteableRegistry), new RestMultiSearchTemplateAction(settings), new RestRenderSearchTemplateAction() ); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 0dbb810902b44..cfd726fd96fc3 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -33,6 +34,12 @@ public class RestSearchTemplateAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); + private final NamedWriteableRegistry namedWriteableRegistry; + + public RestSearchTemplateAction(NamedWriteableRegistry namedWriteableRegistry) { + this.namedWriteableRegistry = namedWriteableRegistry; + } + @Override public List routes() { return List.of( @@ -62,7 +69,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client searchRequest, request, null, - 
client.getNamedWriteableRegistry(), + namedWriteableRegistry, size -> searchRequest.source().size(size) ); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 34e771c51e4f4..39da4066a7859 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.rest.RestStatus; @@ -22,14 +21,10 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.io.InputStream; -import java.util.Map; public class SearchTemplateResponse extends ActionResponse implements ToXContentObject { public static ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output"); @@ -108,31 +103,6 @@ public boolean hasReferences() { return refCounted.hasReferences(); } - public static SearchTemplateResponse fromXContent(XContentParser parser) throws IOException { - SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); - Map contentAsMap = parser.map(); - - if (contentAsMap.containsKey(TEMPLATE_OUTPUT_FIELD.getPreferredName())) { - Object source = 
contentAsMap.get(TEMPLATE_OUTPUT_FIELD.getPreferredName()); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).value(source); - searchTemplateResponse.setSource(BytesReference.bytes(builder)); - } else { - XContentType contentType = parser.contentType(); - XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap); - try ( - XContentParser searchResponseParser = XContentHelper.createParserNotCompressed( - XContentParserConfiguration.EMPTY.withRegistry(parser.getXContentRegistry()) - .withDeprecationHandler(parser.getDeprecationHandler()), - BytesReference.bytes(builder), - contentType - ) - ) { - searchTemplateResponse.setResponse(SearchResponse.fromXContent(searchResponseParser)); - } - } - return searchTemplateResponse; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java index 03f2fbd3e81a7..86f23397cfadb 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Strings; @@ -95,8 +96,26 @@ private static MultiSearchTemplateResponse createTestInstanceWithFailures() { } @Override - protected MultiSearchTemplateResponse doParseInstance(XContentParser parser) throws IOException { - return 
MultiSearchTemplateResponse.fromXContext(parser); + protected MultiSearchTemplateResponse doParseInstance(XContentParser parser) { + // The MultiSearchTemplateResponse is identical to the multi search response so we reuse the parsing logic in multi search response + MultiSearchResponse mSearchResponse = MultiSearchResponse.fromXContext(parser); + try { + org.elasticsearch.action.search.MultiSearchResponse.Item[] responses = mSearchResponse.getResponses(); + MultiSearchTemplateResponse.Item[] templateResponses = new MultiSearchTemplateResponse.Item[responses.length]; + int i = 0; + for (org.elasticsearch.action.search.MultiSearchResponse.Item item : responses) { + SearchTemplateResponse stResponse = null; + if (item.getResponse() != null) { + stResponse = new SearchTemplateResponse(); + stResponse.setResponse(item.getResponse()); + item.getResponse().incRef(); + } + templateResponses[i++] = new MultiSearchTemplateResponse.Item(stResponse, item.getFailure()); + } + return new MultiSearchTemplateResponse(templateResponses, mSearchResponse.getTook().millis()); + } finally { + mSearchResponse.decRef(); + } } @Override diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index 4896584d7aadf..7c67938044c49 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -200,6 +200,7 @@ public void testSimple() throws IOException { assertThat(TemplateScript.execute(), equalTo("{\"match_all\":{}}")); } + @SuppressWarnings("deprecation") // GeneralScriptException public void testDetectMissingParam() { Map scriptOptions = Map.ofEntries(Map.entry(MustacheScriptEngine.DETECT_MISSING_PARAMS_OPTION, "true")); @@ -291,7 +292,6 @@ public String toString() { * * If 
we change this, we should *know* that we're changing it. */ - @SuppressWarnings({ "deprecation", "removal" }) public void testReflection() { Map vars = Map.of("obj", new TestReflection()); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index 4e30d87b6a174..4cd14fa97d710 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -8,27 +8,29 @@ package org.elasticsearch.script.mustache; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; import org.junit.Before; -import org.mockito.Mockito; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.mockito.Mockito.mock; + public final class RestSearchTemplateActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before public void setUpAction() { - controller().registerHandler(new RestSearchTemplateAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(SearchTemplateResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(SearchTemplateResponse.class)); + controller().registerHandler(new RestSearchTemplateAction(mock(NamedWriteableRegistry.class))); + verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); + 
verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); } public void testTypeInPath() { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 73c8887669a02..f41c2533a020f 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchResponseUtils; @@ -20,9 +21,11 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Map; import java.util.function.Predicate; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -42,7 +45,28 @@ protected SearchTemplateResponse createTestInstance() { @Override protected SearchTemplateResponse doParseInstance(XContentParser parser) throws IOException { - return SearchTemplateResponse.fromXContent(parser); + SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); + Map contentAsMap = parser.map(); + + if (contentAsMap.containsKey(SearchTemplateResponse.TEMPLATE_OUTPUT_FIELD.getPreferredName())) { + Object source = 
contentAsMap.get(SearchTemplateResponse.TEMPLATE_OUTPUT_FIELD.getPreferredName()); + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).value(source); + searchTemplateResponse.setSource(BytesReference.bytes(builder)); + } else { + XContentType contentType = parser.contentType(); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap); + try ( + XContentParser searchResponseParser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY.withRegistry(parser.getXContentRegistry()) + .withDeprecationHandler(parser.getDeprecationHandler()), + BytesReference.bytes(builder), + contentType + ) + ) { + searchTemplateResponse.setResponse(SearchResponse.fromXContent(searchResponseParser)); + } + } + return searchTemplateResponse; } /** @@ -158,12 +182,12 @@ public void testSourceToXContent() throws IOException { } public void testSearchResponseToXContent() throws IOException { - SearchHit hit = new SearchHit(1, "id"); + SearchHit hit = SearchHit.unpooled(1, "id"); hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; SearchResponse searchResponse = new SearchResponse( - new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), + SearchHits.unpooled(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), null, null, false, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ErrorCauseWrapper.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ErrorCauseWrapper.java index 308d6223c666e..c1e1012eb3381 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ErrorCauseWrapper.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ErrorCauseWrapper.java @@ -36,7 +36,7 @@ private ErrorCauseWrapper(Throwable realCause) { this.realCause = realCause; } - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + protected XContentBuilder 
toXContent(XContentBuilder builder, Params params, int nestedLevel) throws IOException { builder.field("type", getExceptionName(realCause)); builder.field("reason", realCause.getMessage()); return builder; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 62302331b38d8..f9deddd5f4e85 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -160,6 +161,7 @@ public List> getContexts() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java index 376278f8f0c52..6d88ff1e8db6a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java @@ -54,7 +54,7 @@ */ public class PainlessContextAction { - public static final ActionType INSTANCE = new ActionType<>("cluster:admin/scripts/painless/context", Response::new); + public static final ActionType INSTANCE = new 
ActionType<>("cluster:admin/scripts/painless/context"); private static final String SCRIPT_CONTEXT_NAME_PARAM = "context"; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 7393dff40fa11..3b67b76f59a31 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -24,11 +24,11 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -116,7 +116,8 @@ public class PainlessExecuteAction { - public static final ActionType INSTANCE = new ActionType<>("cluster:admin/scripts/painless/execute", Response::new); + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/scripts/painless/execute"); + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>(INSTANCE.name(), Response::new); private PainlessExecuteAction() {/* no instances */} @@ -528,9 +529,9 @@ protected void doExecute(Task task, Request request, ActionListener li } else { // forward to remote cluster String clusterAlias = request.getContextSetup().getClusterAlias(); - Client remoteClusterClient = 
transportService.getRemoteClusterService() - .getRemoteClusterClient(threadPool, clusterAlias, EsExecutors.DIRECT_EXECUTOR_SERVICE); - remoteClusterClient.admin().cluster().execute(PainlessExecuteAction.INSTANCE, request, listener); + transportService.getRemoteClusterService() + .getRemoteClusterClient(clusterAlias, EsExecutors.DIRECT_EXECUTOR_SERVICE) + .execute(PainlessExecuteAction.REMOTE_TYPE, request, listener); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index abd2a4c8fa622..e1fbc2e149441 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -265,7 +265,7 @@ protected IngestScriptSupport ingestScriptSupport() { } @Override - protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) { + protected Function loadBlockExpected() { return v -> ((BytesRef) v).utf8ToString(); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java index 13927962e5d58..efdf3c09bbe92 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java @@ -438,7 +438,7 @@ public List invalidExample() throws IOException { } @Override - protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) { + protected Function loadBlockExpected() { return v -> (Number) v; } @@ -463,7 +463,11 @@ public void testEncodeDecodeExactScalingFactor() { 
public void testEncodeDecodeNoSaturation() { double scalingFactor = randomValue(); double unsaturated = randomDoubleBetween(Long.MIN_VALUE / scalingFactor, Long.MAX_VALUE / scalingFactor, true); - assertThat(encodeDecode(unsaturated, scalingFactor), equalTo(Math.round(unsaturated * scalingFactor) / scalingFactor)); + assertEquals( + encodeDecode(unsaturated, scalingFactor), + Math.round(unsaturated * scalingFactor) / scalingFactor, + unsaturated * 1e-10 + ); } /** diff --git a/modules/parent-join/src/main/java/module-info.java b/modules/parent-join/src/main/java/module-info.java index ff63bfaa62f25..b3ac78e1fe2b6 100644 --- a/modules/parent-join/src/main/java/module-info.java +++ b/modules/parent-join/src/main/java/module-info.java @@ -12,6 +12,4 @@ requires org.elasticsearch.xcontent; requires org.apache.lucene.core; requires org.apache.lucene.join; - - provides org.elasticsearch.plugins.spi.NamedXContentProvider with org.elasticsearch.join.spi.ParentJoinNamedXContentProvider; } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedChildren.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedChildren.java deleted file mode 100644 index 0ad39b1df59a9..0000000000000 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedChildren.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.join.aggregations; - -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -public class ParsedChildren extends ParsedSingleBucketAggregation implements Children { - - @Override - public String getType() { - return ChildrenAggregationBuilder.NAME; - } - - public static ParsedChildren fromXContent(XContentParser parser, final String name) throws IOException { - return parseXContent(parser, new ParsedChildren(), name); - } -} diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedParent.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedParent.java deleted file mode 100644 index b61fa9c08453c..0000000000000 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedParent.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.join.aggregations; - -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -public class ParsedParent extends ParsedSingleBucketAggregation implements Parent { - - @Override - public String getType() { - return ParentAggregationBuilder.NAME; - } - - public static ParsedParent fromXContent(XContentParser parser, final String name) throws IOException { - return parseXContent(parser, new ParsedParent(), name); - } -} diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/spi/ParentJoinNamedXContentProvider.java b/modules/parent-join/src/main/java/org/elasticsearch/join/spi/ParentJoinNamedXContentProvider.java deleted file mode 100644 index 7d6392f9bc172..0000000000000 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/spi/ParentJoinNamedXContentProvider.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.join.spi; - -import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder; -import org.elasticsearch.join.aggregations.ParentAggregationBuilder; -import org.elasticsearch.join.aggregations.ParsedChildren; -import org.elasticsearch.join.aggregations.ParsedParent; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Arrays; -import java.util.List; - -public class ParentJoinNamedXContentProvider implements NamedXContentProvider { - - @Override - public List getNamedXContentParsers() { - ParseField parseFieldChildren = new ParseField(ChildrenAggregationBuilder.NAME); - ParseField parseFieldParent = new ParseField(ParentAggregationBuilder.NAME); - ContextParser contextParserChildren = (p, name) -> ParsedChildren.fromXContent(p, (String) name); - ContextParser contextParserParent = (p, name) -> ParsedParent.fromXContent(p, (String) name); - return Arrays.asList( - new NamedXContentRegistry.Entry(Aggregation.class, parseFieldChildren, contextParserChildren), - new NamedXContentRegistry.Entry(Aggregation.class, parseFieldParent, contextParserParent) - ); - } -} diff --git a/modules/parent-join/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/modules/parent-join/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider deleted file mode 100644 index 48687c21c3250..0000000000000 --- a/modules/parent-join/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider +++ /dev/null @@ -1 +0,0 @@ -org.elasticsearch.join.spi.ParentJoinNamedXContentProvider \ No newline at end of file diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalChildrenTests.java 
b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalChildrenTests.java index bb8c9fa168332..3cb674ae6ddee 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalChildrenTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalChildrenTests.java @@ -8,15 +8,10 @@ package org.elasticsearch.join.aggregations; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import java.util.List; import java.util.Map; @@ -28,18 +23,6 @@ protected SearchPlugin registerPlugin() { return new ParentJoinPlugin(); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(ChildrenAggregationBuilder.NAME), - (p, c) -> ParsedChildren.fromXContent(p, (String) c) - ) - ); - } - @Override protected InternalChildren createTestInstance( String name, @@ -54,9 +37,4 @@ protected InternalChildren createTestInstance( protected void extraAssertReduced(InternalChildren reduced, List inputs) { // Nothing extra to assert } - - @Override - protected Class implementationClass() { - return ParsedChildren.class; - } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalParentTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalParentTests.java index 25e8315756048..31ae09ab5a14b 100644 --- 
a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalParentTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalParentTests.java @@ -8,15 +8,10 @@ package org.elasticsearch.join.aggregations; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; -import org.elasticsearch.xcontent.NamedXContentRegistry.Entry; -import org.elasticsearch.xcontent.ParseField; import java.util.List; import java.util.Map; @@ -28,14 +23,6 @@ protected SearchPlugin registerPlugin() { return new ParentJoinPlugin(); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new Entry(Aggregation.class, new ParseField(ParentAggregationBuilder.NAME), (p, c) -> ParsedParent.fromXContent(p, (String) c)) - ); - } - @Override protected InternalParent createTestInstance( String name, @@ -50,9 +37,4 @@ protected InternalParent createTestInstance( protected void extraAssertReduced(InternalParent reduced, List inputs) { // Nothing extra to assert } - - @Override - protected Class implementationClass() { - return ParsedParent.class; - } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 4e3d9baaf5c92..138007c104d2b 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -97,7 +97,7 @@ public void process(HitContext hit) throws IOException { BytesReference document = percolateQuery.getDocuments().get(slot); leafStoredFields.advanceTo(slot); HitContext subContext = new HitContext( - new SearchHit(slot, "unknown"), + SearchHit.unpooled(slot, "unknown"), percolatorLeafReaderContext, slot, leafStoredFields.storedFields(), diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index b65d966bd6551..82ec63b785e56 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -56,7 +56,7 @@ public void testHitsExecute() throws Exception { LeafReaderContext context = reader.leaves().get(0); // A match: { - HitContext hit = new HitContext(new SearchHit(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); @@ -87,7 +87,7 @@ public void testHitsExecute() throws Exception { // No match: { - HitContext hit = new HitContext(new SearchHit(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); @@ -117,7 +117,7 @@ 
public void testHitsExecute() throws Exception { // No query: { - HitContext hit = new HitContext(new SearchHit(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); PercolateQuery.QueryStore queryStore = ctx -> docId -> null; MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java index 814abcf02c569..381fe2cd7a77e 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java @@ -32,7 +32,7 @@ public class RankEvalPlugin extends Plugin implements ActionPlugin { - public static final ActionType ACTION = new ActionType<>("indices:data/read/rank_eval", RankEvalResponse::new); + public static final ActionType ACTION = new ActionType<>("indices:data/read/rank_eval"); @Override public List> getActions() { @@ -42,6 +42,7 @@ public class RankEvalPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java index cc7397637e04a..061d8292b3e5f 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java @@ -14,21 +14,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; /** * Returns the results for a {@link RankEvalRequest}.
@@ -111,37 +103,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } - - private static final ParseField DETAILS_FIELD = new ParseField("details"); - private static final ParseField FAILURES_FIELD = new ParseField("failures"); - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "rank_eval_response", - true, - a -> new RankEvalResponse( - (Double) a[0], - ((List) a[1]).stream().collect(Collectors.toMap(EvalQueryQuality::getId, Function.identity())), - ((List>) a[2]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)) - ) - ); - static { - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), EvalQueryQuality.METRIC_SCORE_FIELD); - PARSER.declareNamedObjects( - ConstructingObjectParser.optionalConstructorArg(), - (p, c, n) -> EvalQueryQuality.fromXContent(p, n), - DETAILS_FIELD - ); - PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.nextToken(), p); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p); - Tuple tuple = new Tuple<>(n, ElasticsearchException.failureFromXContent(p)); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p); - return tuple; - }, FAILURES_FIELD); - - } - - public static RankEvalResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.apply(parser, null); - } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java index d58c15d4efd74..f57c02bcdcc22 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java @@ 
-33,12 +33,12 @@ public class RatedSearchHit implements Writeable, ToXContentObject { private final OptionalInt rating; public RatedSearchHit(SearchHit searchHit, OptionalInt rating) { - this.searchHit = searchHit; + this.searchHit = searchHit.asUnpooled(); this.rating = rating; } RatedSearchHit(StreamInput in) throws IOException { - this(SearchHit.readFrom(in), in.readBoolean() ? OptionalInt.of(in.readVInt()) : OptionalInt.empty()); + this(SearchHit.readFrom(in, false), in.readBoolean() ? OptionalInt.of(in.readVInt()) : OptionalInt.empty()); } @Override diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index d4ec7ba9b9ef5..d4d58c3c0ae71 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -21,10 +21,14 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -39,7 +43,9 @@ import java.util.List; import java.util.Map; import java.util.OptionalInt; +import java.util.function.Function; import java.util.function.Predicate; +import java.util.stream.Collectors; import static java.util.Collections.singleton; import static 
org.elasticsearch.common.xcontent.XContentHelper.toXContent; @@ -49,6 +55,32 @@ public class RankEvalResponseTests extends ESTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "rank_eval_response", + true, + a -> new RankEvalResponse( + (Double) a[0], + ((List) a[1]).stream().collect(Collectors.toMap(EvalQueryQuality::getId, Function.identity())), + ((List>) a[2]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)) + ) + ); + static { + PARSER.declareDouble(ConstructingObjectParser.constructorArg(), EvalQueryQuality.METRIC_SCORE_FIELD); + PARSER.declareNamedObjects( + ConstructingObjectParser.optionalConstructorArg(), + (p, c, n) -> EvalQueryQuality.fromXContent(p, n), + new ParseField("details") + ); + PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.nextToken(), p); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p); + Tuple tuple = new Tuple<>(n, ElasticsearchException.failureFromXContent(p)); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p); + return tuple; + }, new ParseField("failures")); + } + private static final Exception[] RANDOM_EXCEPTIONS = new Exception[] { new ClusterBlockException(singleton(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)), new CircuitBreakingException("Data too large", 123, 456, CircuitBreaker.Durability.PERMANENT), @@ -117,7 +149,7 @@ public void testXContentParsing() throws IOException { BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random()); RankEvalResponse parsedItem; try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { - parsedItem = RankEvalResponse.fromXContent(parser); + parsedItem = PARSER.apply(parser, null); assertNull(parser.nextToken()); } 
assertNotSame(testItem, parsedItem); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java index c5a09d67d94d0..c7ad2f2ea4bb5 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.rankeval; +import org.apache.lucene.util.Constants; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; @@ -123,6 +124,8 @@ public void testXContentRoundtrip() throws IOException { } public void testXContentParsingIsNotLenient() throws IOException { + assumeFalse("https://github.com/elastic/elasticsearch/issues/104570", Constants.WINDOWS); + RatedRequest testItem = createTestItem(randomBoolean()); XContentType xContentType = randomFrom(XContentType.values()); BytesReference originalBytes = toShuffledXContent(testItem, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java index 982d1afcf6dd3..95ee2a1ae2d6f 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.rankeval; +import org.apache.lucene.util.Constants; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse; @@ -42,6 +43,8 @@ public final class 
TransportRankEvalActionTests extends ESTestCase { * Test that request parameters like indicesOptions or searchType from ranking evaluation request are transfered to msearch request */ public void testTransferRequestParameters() throws Exception { + assumeFalse("https://github.com/elastic/elasticsearch/issues/104570", Constants.WINDOWS); + String indexName = "test_index"; List specifications = new ArrayList<>(); specifications.add( diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java index 7dad062ab3bca..37de70ded462f 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java @@ -209,7 +209,7 @@ public void testDeleteByQuery() throws Exception { .addSort(SORTING_FIELD, SortOrder.DESC), response -> { // Modify a subset of the target documents concurrently - final List originalDocs = Arrays.asList(response.getHits().getHits()); + final List originalDocs = Arrays.asList(response.getHits().asUnpooled().getHits()); docsModifiedConcurrently.addAll(randomSubsetOf(finalConflictingOps, originalDocs)); } ); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java index fcea4618f4cd4..7a2166e5860b4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java @@ -13,7 +13,6 @@ import 
org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; @@ -24,9 +23,11 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.VersionType; @@ -48,13 +49,11 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; @@ -92,7 +91,7 @@ public abstract class AbstractAsyncBulkByScrollAction< protected final Request mainRequest; private final AtomicLong startTime = new AtomicLong(-1); - private final Set destinationIndices = Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final Set destinationIndices = ConcurrentCollections.newConcurrentSet(); private final ParentTaskAssigningClient searchClient; private final ParentTaskAssigningClient bulkClient; @@ -554,9 +553,9 @@ void refreshAndFinish(List indexingFailures, List search RefreshRequest refresh 
= new RefreshRequest(); refresh.indices(destinationIndices.toArray(new String[destinationIndices.size()])); logger.debug("[{}]: refreshing", task.getId()); - bulkClient.admin().indices().refresh(refresh, new ActionListener() { + bulkClient.admin().indices().refresh(refresh, new ActionListener<>() { @Override - public void onResponse(RefreshResponse response) { + public void onResponse(BroadcastResponse response) { finishHim(null, indexingFailures, searchFailures, timedOut); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java index 8e7fab68ac697..48c50450656f3 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java @@ -39,10 +39,15 @@ protected AbstractBaseReindexRestHandler(A action) { this.action = action; } - protected RestChannelConsumer doPrepareRequest(RestRequest request, NodeClient client, boolean includeCreated, boolean includeUpdated) - throws IOException { + protected RestChannelConsumer doPrepareRequest( + RestRequest request, + NamedWriteableRegistry namedWriteableRegistry, + NodeClient client, + boolean includeCreated, + boolean includeUpdated + ) throws IOException { // Build the internal request - Request internal = setCommonOptions(request, buildRequest(request, client.getNamedWriteableRegistry())); + Request internal = setCommonOptions(request, buildRequest(request, namedWriteableRegistry)); // Executes the request and waits for completion if (request.paramAsBoolean("wait_for_completion", true)) { diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java index b07eb1b158087..42ff1fda6e74d 100644 --- 
a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java @@ -42,10 +42,7 @@ public class ReindexPlugin extends Plugin implements ActionPlugin { public static final String NAME = "reindex"; - public static final ActionType RETHROTTLE_ACTION = new ActionType<>( - "cluster:admin/reindex/rethrottle", - ListTasksResponse::new - ); + public static final ActionType RETHROTTLE_ACTION = new ActionType<>("cluster:admin/reindex/rethrottle"); @Override public List> getActions() { @@ -67,6 +64,7 @@ public List getNamedWriteables() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -75,9 +73,9 @@ public List getRestHandlers( Supplier nodesInCluster ) { return Arrays.asList( - new RestReindexAction(), - new RestUpdateByQueryAction(), - new RestDeleteByQueryAction(), + new RestReindexAction(namedWriteableRegistry), + new RestUpdateByQueryAction(namedWriteableRegistry), + new RestDeleteByQueryAction(namedWriteableRegistry), new RestRethrottleAction(nodesInCluster) ); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java index 6f225556144c9..99bd0c51f3084 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java @@ -29,8 +29,11 @@ @ServerlessScope(Scope.PUBLIC) public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { - public RestDeleteByQueryAction() { + private final NamedWriteableRegistry namedWriteableRegistry; + + public RestDeleteByQueryAction(NamedWriteableRegistry namedWriteableRegistry) { super(DeleteByQueryAction.INSTANCE); + 
this.namedWriteableRegistry = namedWriteableRegistry; } @Override @@ -51,7 +54,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, client, false, false); + return doPrepareRequest(request, namedWriteableRegistry, client, false, false); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java index 66ef3e028280b..44cbe4712455f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java @@ -32,8 +32,11 @@ @ServerlessScope(Scope.PUBLIC) public class RestReindexAction extends AbstractBaseReindexRestHandler implements RestRequestFilter { - public RestReindexAction() { + private final NamedWriteableRegistry namedWriteableRegistry; + + public RestReindexAction(NamedWriteableRegistry namedWriteableRegistry) { super(ReindexAction.INSTANCE); + this.namedWriteableRegistry = namedWriteableRegistry; } @Override @@ -48,7 +51,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, client, true, true); + return doPrepareRequest(request, namedWriteableRegistry, client, true, true); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java index 50a2b7de6db39..b99e5acbd411d 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java @@ -30,8 +30,11 @@ @ServerlessScope(Scope.PUBLIC) public class RestUpdateByQueryAction extends 
AbstractBulkByQueryRestHandler { - public RestUpdateByQueryAction() { + private final NamedWriteableRegistry namedWriteableRegistry; + + public RestUpdateByQueryAction(NamedWriteableRegistry namedWriteableRegistry) { super(UpdateByQueryAction.INSTANCE); + this.namedWriteableRegistry = namedWriteableRegistry; } @Override @@ -51,7 +54,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, client, false, true); + return doPrepareRequest(request, namedWriteableRegistry, client, false, true); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java index c3cf7cf62f925..c40a4f72bc133 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java @@ -567,8 +567,8 @@ protected RequestWrapper buildRequest(Hit doc) { action.start(); // create a simulated response. 
- SearchHit hit = new SearchHit(0, "id").sourceRef(new BytesArray("{}")); - SearchHits hits = new SearchHits( + SearchHit hit = SearchHit.unpooled(0, "id").sourceRef(new BytesArray("{}")); + SearchHits hits = SearchHits.unpooled( IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0 diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java index b211f7d92f51f..a2911090ab931 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.ingest.DeletePipelineRequest; @@ -21,6 +22,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine.Operation.Origin; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequestBuilder; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.BulkByScrollTask; @@ -80,15 +82,17 @@ public void clearAllowedOperations() { * Executes the cancellation test */ private void testCancel( - String action, + ActionType action, AbstractBulkByScrollRequestBuilder builder, CancelAssertion assertion, Matcher taskDescriptionMatcher ) throws Exception { createIndex(INDEX); - + // Scroll by 1 so that cancellation is easier to control + builder.source().setSize(1); + AbstractBulkByScrollRequest 
request = builder.request(); // Total number of documents created for this test (~10 per primary shard per slice) - int numDocs = getNumShards(INDEX).numPrimaries * 10 * builder.request().getSlices(); + int numDocs = getNumShards(INDEX).numPrimaries * 10 * request.getSlices(); ALLOWED_OPERATIONS.release(numDocs); logger.debug("setting up [{}] docs", numDocs); @@ -105,18 +109,15 @@ private void testCancel( assertHitCount(prepareSearch(INDEX).setSize(0), numDocs); assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0)); - // Scroll by 1 so that cancellation is easier to control - builder.source().setSize(1); - /* Allow a random number of the documents less the number of workers * to be modified by the reindex action. That way at least one worker * is blocked. */ - int numModifiedDocs = randomIntBetween(builder.request().getSlices() * 2, numDocs); + int numModifiedDocs = randomIntBetween(request.getSlices() * 2, numDocs); logger.debug("chose to modify [{}] out of [{}] docs", numModifiedDocs, numDocs); - ALLOWED_OPERATIONS.release(numModifiedDocs - builder.request().getSlices()); + ALLOWED_OPERATIONS.release(numModifiedDocs - request.getSlices()); // Now execute the reindex action... - ActionFuture future = builder.execute(); + ActionFuture future = client().execute(action, request); /* ... and wait for the indexing operation listeners to block. 
It * is important to realize that some of the workers might have @@ -130,7 +131,7 @@ private void testCancel( ); // 10 seconds is usually fine but on heavily loaded machines this can take a while // Status should show the task running - TaskInfo mainTask = findTaskToCancel(action, builder.request().getSlices()); + TaskInfo mainTask = findTaskToCancel(action.name(), request.getSlices()); BulkByScrollTask.Status status = (BulkByScrollTask.Status) mainTask.status(); assertNull(status.getReasonCancelled()); @@ -150,7 +151,7 @@ private void testCancel( logger.debug("asserting that parent is marked canceled {}", status); assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled()); - if (builder.request().getSlices() > 1) { + if (request.getSlices() > 1) { boolean foundCancelled = false; ListTasksResponse sliceList = clusterAdmin().prepareListTasks() .setTargetParentTaskId(mainTask.taskId()) @@ -168,11 +169,11 @@ private void testCancel( } logger.debug("unblocking the blocked update"); - ALLOWED_OPERATIONS.release(builder.request().getSlices()); + ALLOWED_OPERATIONS.release(request.getSlices()); // Checks that no more operations are executed assertBusy(() -> { - if (builder.request().getSlices() == 1) { + if (request.getSlices() == 1) { /* We can only be sure that we've drained all the permits if we only use a single worker. Otherwise some worker may have * exhausted all of its documents before we blocked. */ assertEquals(0, ALLOWED_OPERATIONS.availablePermits()); @@ -191,7 +192,7 @@ private void testCancel( String tasks = clusterAdmin().prepareListTasks().setTargetParentTaskId(mainTask.taskId()).setDetailed(true).get().toString(); throw new RuntimeException("Exception while waiting for the response. 
Running tasks: " + tasks, e); } finally { - if (builder.request().getSlices() >= 1) { + if (request.getSlices() >= 1) { // If we have more than one worker we might not have made all the modifications numModifiedDocs -= ALLOWED_OPERATIONS.availablePermits(); } @@ -221,7 +222,7 @@ public static TaskInfo findTaskToCancel(String actionName, int workerCount) { } public void testReindexCancel() throws Exception { - testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest"), (response, total, modified) -> { + testCancel(ReindexAction.INSTANCE, reindex().source(INDEX).destination("dest"), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request"))); refresh("dest"); @@ -239,17 +240,22 @@ public void testUpdateByQueryCancel() throws Exception { }"""); assertAcked(clusterAdmin().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); - testCancel(UpdateByQueryAction.NAME, updateByQuery().setPipeline("set-processed").source(INDEX), (response, total, modified) -> { - assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request"))); - assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); - }, equalTo("update-by-query [" + INDEX + "]")); + testCancel( + UpdateByQueryAction.INSTANCE, + updateByQuery().setPipeline("set-processed").source(INDEX), + (response, total, modified) -> { + assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request"))); + assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); + }, + equalTo("update-by-query [" + INDEX + "]") + ); assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("set-processed")).get()); } public void testDeleteByQueryCancel() throws Exception { testCancel( - DeleteByQueryAction.NAME, + DeleteByQueryAction.INSTANCE, 
deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request"))); @@ -261,7 +267,7 @@ public void testDeleteByQueryCancel() throws Exception { public void testReindexCancelWithWorkers() throws Exception { testCancel( - ReindexAction.NAME, + ReindexAction.INSTANCE, reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest").setSlices(5), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); @@ -283,7 +289,7 @@ public void testUpdateByQueryCancelWithWorkers() throws Exception { assertAcked(clusterAdmin().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); testCancel( - UpdateByQueryAction.NAME, + UpdateByQueryAction.INSTANCE, updateByQuery().setPipeline("set-processed").source(INDEX).setSlices(5), (response, total, modified) -> { assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); @@ -297,7 +303,7 @@ public void testUpdateByQueryCancelWithWorkers() throws Exception { public void testDeleteByQueryCancelWithWorkers() throws Exception { testCancel( - DeleteByQueryAction.NAME, + DeleteByQueryAction.INSTANCE, deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()).setSlices(5), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java index 7ac50eb0e7c6c..44e69d3a4cda8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java @@ -162,8 +162,8 @@ public void testScrollKeepAlive() { private SearchResponse createSearchResponse() { // create a simulated response. - SearchHit hit = new SearchHit(0, "id").sourceRef(new BytesArray("{}")); - SearchHits hits = new SearchHits( + SearchHit hit = SearchHit.unpooled(0, "id").sourceRef(new BytesArray("{}")); + SearchHits hits = SearchHits.unpooled( IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0 diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java index 855cb1863f399..24753c2b9ae6a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java @@ -39,7 +39,7 @@ public void testDeprecatedSort() { int subsetSize = randomIntBetween(1, max - 1); ReindexRequestBuilder copy = new ReindexRequestBuilder(client()).source("source").destination("dest").refresh(true); copy.maxDocs(subsetSize); - copy.request().addSortField("foo", SortOrder.DESC); + copy.source().addSort("foo", SortOrder.DESC); assertThat(copy.get(), matcher().created(subsetSize)); assertHitCount(client().prepareSearch("dest").setSize(0), subsetSize); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java index fdd98992503d7..241707f6e0f93 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.reindex; +import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -16,22 +17,23 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.XContentType; import org.junit.Before; -import org.mockito.Mockito; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; +import static org.mockito.Mockito.mock; + public final class RestDeleteByQueryActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before public void setUpAction() { - controller().registerHandler(new RestDeleteByQueryAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class)); + controller().registerHandler(new RestDeleteByQueryAction(mock(NamedWriteableRegistry.class))); + verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); + verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } public void testTypeInPath() throws IOException { diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java index 0df013056dcdd..3484b61ca2c9a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java @@ -24,6 +24,7 @@ import java.util.Collections; import static java.util.Collections.singletonMap; +import static org.mockito.Mockito.mock; public class RestReindexActionTests extends 
RestActionTestCase { @@ -31,7 +32,7 @@ public class RestReindexActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestReindexAction(); + action = new RestReindexAction(mock(NamedWriteableRegistry.class)); controller().registerHandler(action); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java index 889c8d0091c81..83e298c3a235f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.reindex; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -16,22 +17,23 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.XContentType; import org.junit.Before; -import org.mockito.Mockito; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; +import static org.mockito.Mockito.mock; + public final class RestUpdateByQueryActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before public void setUpAction() { - controller().registerHandler(new RestUpdateByQueryAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(BulkByScrollResponse.class)); + controller().registerHandler(new RestUpdateByQueryAction(mock(NamedWriteableRegistry.class))); + verifyingClient.setExecuteVerifier((actionType, request) -> 
mock(BulkByScrollResponse.class)); + verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } public void testTypeInPath() throws IOException { diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index f5c1912d15251..e916b02e62b8e 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase; @@ -41,6 +42,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Predicate; import java.util.regex.Pattern; @@ -196,12 +198,21 @@ private static class AzureHTTPStatsCollectorHandler extends HttpStatsCollectorHa private static final Predicate LIST_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+\\?.+").asMatchPredicate(); private static final Predicate GET_BLOB_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+/.+").asMatchPredicate(); + private final Set seenRequestIds = ConcurrentCollections.newConcurrentSet(); + private AzureHTTPStatsCollectorHandler(HttpHandler delegate) { super(delegate); } @Override protected void maybeTrack(String request, Headers headers) { + // Same request id is a retry + // 
https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-ncnbi/817da997-30d2-4cd3-972f-a0073e4e98f7 + // Do not count retries since the client side request stats do not track them yet. + // See https://github.com/elastic/elasticsearch/issues/104443 + if (false == seenRequestIds.add(headers.getFirst("X-ms-client-request-id"))) { + return; + } if (GET_BLOB_PATTERN.test(request)) { trackRequest("GetBlob"); } else if (Regex.simpleMatch("HEAD /*/*/*", request)) { diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java index 2dff8a10d39f7..ed2e0bf9be0b2 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java @@ -165,9 +165,9 @@ public void onNext(T element) { } public void cancel() { + done = true; cancelSubscription(); clearQueue(); - done = true; // cancel should be called from the consumer // thread, but to avoid potential deadlocks // we just try to release a possibly blocked @@ -177,9 +177,9 @@ public void cancel() { @Override public void onError(Throwable t) { + done = true; clearQueue(); error = t; - done = true; signalConsumer(); } diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index e70151cbdf8ee..9ad2c57b7f585 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -15,8 +15,8 @@ 
import com.sun.net.httpserver.HttpHandler; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.blobstore.BlobContainer; @@ -191,7 +191,7 @@ public void testAbortRequestStats() throws Exception { waitForDocs(nbDocs, indexer); } flushAndRefresh(index); - ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); @@ -234,7 +234,7 @@ public void testMetrics() throws Exception { waitForDocs(nbDocs, indexer); } flushAndRefresh(index); - ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index ba762537537e3..83668cc271922 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -25,6 +25,7 @@ import 
org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.Repository; +import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; @@ -84,13 +85,13 @@ protected S3Repository createRepository( @Override public Collection createComponents(PluginServices services) { - service.set(s3Service(services.environment(), services.clusterService().getSettings())); + service.set(s3Service(services.environment(), services.clusterService().getSettings(), services.resourceWatcherService())); this.service.get().refreshAndClearCache(S3ClientSettings.load(settings)); return List.of(service); } - S3Service s3Service(Environment environment, Settings nodeSettings) { - return new S3Service(environment, nodeSettings); + S3Service s3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { + return new S3Service(environment, nodeSettings, resourceWatcherService); } @Override diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 04eadba9f9f8f..0d8b2561f18f9 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -14,9 +14,9 @@ import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; +import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.Version; import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.core.IOUtils; @@ -48,7 +48,7 @@ class 
S3RetryingInputStream extends InputStream { private final String blobKey; private final long start; private final long end; - private final List failures; + private final List failures; private S3ObjectInputStream currentStream; private long currentStreamFirstOffset; @@ -77,29 +77,36 @@ class S3RetryingInputStream extends InputStream { this.failures = new ArrayList<>(MAX_SUPPRESSED_EXCEPTIONS); this.start = start; this.end = end; - openStream(); + final int initialAttempt = attempt; + openStreamWithRetry(); + maybeLogForSuccessAfterRetries(initialAttempt, "opened"); } - private void openStream() throws IOException { - try (AmazonS3Reference clientReference = blobStore.clientReference()) { - final GetObjectRequest getObjectRequest = new GetObjectRequest(blobStore.bucket(), blobKey); - getObjectRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.GET_OBJECT, purpose)); - if (currentOffset > 0 || start > 0 || end < Long.MAX_VALUE - 1) { - assert start + currentOffset <= end - : "requesting beyond end, start = " + start + " offset=" + currentOffset + " end=" + end; - getObjectRequest.setRange(Math.addExact(start, currentOffset), end); - } - final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); - this.currentStreamFirstOffset = Math.addExact(start, currentOffset); - this.currentStreamLastOffset = Math.addExact(currentStreamFirstOffset, getStreamLength(s3Object)); - this.currentStream = s3Object.getObjectContent(); - } catch (final AmazonClientException e) { - if (e instanceof AmazonS3Exception amazonS3Exception) { - if (404 == amazonS3Exception.getStatusCode()) { - throw addSuppressedExceptions(new NoSuchFileException("Blob object [" + blobKey + "] not found: " + e.getMessage())); + private void openStreamWithRetry() throws IOException { + while (true) { + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + final GetObjectRequest getObjectRequest = new 
GetObjectRequest(blobStore.bucket(), blobKey); + getObjectRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.GET_OBJECT, purpose)); + if (currentOffset > 0 || start > 0 || end < Long.MAX_VALUE - 1) { + assert start + currentOffset <= end + : "requesting beyond end, start = " + start + " offset=" + currentOffset + " end=" + end; + getObjectRequest.setRange(Math.addExact(start, currentOffset), end); + } + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); + this.currentStreamFirstOffset = Math.addExact(start, currentOffset); + this.currentStreamLastOffset = Math.addExact(currentStreamFirstOffset, getStreamLength(s3Object)); + this.currentStream = s3Object.getObjectContent(); + return; + } catch (AmazonClientException e) { + if (e instanceof AmazonS3Exception amazonS3Exception && 404 == amazonS3Exception.getStatusCode()) { + throw addSuppressedExceptions( + new NoSuchFileException("Blob object [" + blobKey + "] not found: " + amazonS3Exception.getMessage()) + ); } + + final long delayInMillis = maybeLogAndComputeRetryDelay("opening", e); + delayBeforeRetry(delayInMillis); } - throw addSuppressedExceptions(e); } } @@ -125,14 +132,16 @@ private long getStreamLength(final S3Object object) { @Override public int read() throws IOException { ensureOpen(); + final int initialAttempt = attempt; while (true) { try { final int result = currentStream.read(); if (result == -1) { eof = true; - return -1; + } else { + currentOffset += 1; } - currentOffset += 1; + maybeLogForSuccessAfterRetries(initialAttempt, "read"); return result; } catch (IOException e) { reopenStreamOrFail(e); @@ -143,14 +152,16 @@ public int read() throws IOException { @Override public int read(byte[] b, int off, int len) throws IOException { ensureOpen(); + final int initialAttempt = attempt; while (true) { try { final int bytesRead = currentStream.read(b, off, len); if (bytesRead == -1) { eof = true; - return -1; + } else { + 
currentOffset += bytesRead; } - currentOffset += bytesRead; + maybeLogForSuccessAfterRetries(initialAttempt, "read"); return bytesRead; } catch (IOException e) { reopenStreamOrFail(e); @@ -166,45 +177,120 @@ private void ensureOpen() { } private void reopenStreamOrFail(IOException e) throws IOException { - if (purpose == OperationPurpose.REPOSITORY_ANALYSIS) { - logger.warn(() -> format(""" - failed reading [%s/%s] at offset [%s]""", blobStore.bucket(), blobKey, start + currentOffset), e); - throw e; - } - - final int maxAttempts = blobStore.getMaxRetries() + 1; - final long meaningfulProgressSize = Math.max(1L, blobStore.bufferSizeInBytes() / 100L); - final long currentStreamProgress = Math.subtractExact(Math.addExact(start, currentOffset), currentStreamFirstOffset); - if (currentStreamProgress >= meaningfulProgressSize) { + if (currentStreamProgress() >= meaningfulProgressSize) { failuresAfterMeaningfulProgress += 1; } - final Supplier messageSupplier = () -> format( - """ - failed reading [%s/%s] at offset [%s]; this was attempt [%s] to read this blob which yielded [%s] bytes; in total \ - [%s] of the attempts to read this blob have made meaningful progress and do not count towards the maximum number of \ - retries; the maximum number of read attempts which do not make meaningful progress is [%s]""", - blobStore.bucket(), - blobKey, - start + currentOffset, - attempt, - currentStreamProgress, - failuresAfterMeaningfulProgress, - maxAttempts - ); - if (attempt >= maxAttempts + failuresAfterMeaningfulProgress) { + final long delayInMillis = maybeLogAndComputeRetryDelay("reading", e); + maybeAbort(currentStream); + IOUtils.closeWhileHandlingException(currentStream); + + delayBeforeRetry(delayInMillis); + openStreamWithRetry(); + } + + // The method throws if the operation should *not* be retried. Otherwise, it keeps a record for the attempt and associated failure + // and compute the delay before retry. 
+ private long maybeLogAndComputeRetryDelay(String action, T e) throws T { + if (shouldRetry(attempt) == false) { final var finalException = addSuppressedExceptions(e); - logger.warn(messageSupplier, finalException); + logForFailure(action, finalException); throw finalException; } - logger.debug(messageSupplier, e); - attempt += 1; + + // Log at info level for the 1st retry and then exponentially less + logForRetry(Integer.bitCount(attempt) == 1 ? Level.INFO : Level.DEBUG, action, e); if (failures.size() < MAX_SUPPRESSED_EXCEPTIONS) { failures.add(e); } - maybeAbort(currentStream); - IOUtils.closeWhileHandlingException(currentStream); - openStream(); + final long delayInMillis = getRetryDelayInMillis(); + attempt += 1; // increment after computing delay because attempt affects the result + return delayInMillis; + } + + private void logForFailure(String action, Exception e) { + logger.warn( + () -> format( + "failed %s [%s/%s] at offset [%s] with purpose [%s]", + action, + blobStore.bucket(), + blobKey, + start + currentOffset, + purpose.getKey() + ), + e + ); + } + + private void logForRetry(Level level, String action, Exception e) { + logger.log( + level, + () -> format( + """ + failed %s [%s/%s] at offset [%s] with purpose [%s]; \ + this was attempt [%s] to read this blob which yielded [%s] bytes; in total \ + [%s] of the attempts to read this blob have made meaningful progress and do not count towards the maximum number of \ + retries; the maximum number of read attempts which do not make meaningful progress is [%s]""", + action, + blobStore.bucket(), + blobKey, + start + currentOffset, + purpose.getKey(), + attempt, + currentStreamProgress(), + failuresAfterMeaningfulProgress, + maxRetriesForNoMeaningfulProgress() + ), + e + ); + } + + private void maybeLogForSuccessAfterRetries(int initialAttempt, String action) { + if (attempt > initialAttempt) { + logger.info( + "successfully {} input stream for [{}/{}] with purpose [{}] after [{}] retries", + action, + 
blobStore.bucket(), + blobKey, + purpose, + attempt - initialAttempt + ); + } + } + + private long currentStreamProgress() { + return Math.subtractExact(Math.addExact(start, currentOffset), currentStreamFirstOffset); + } + + private boolean shouldRetry(int attempt) { + if (purpose == OperationPurpose.REPOSITORY_ANALYSIS) { + return false; + } + if (purpose == OperationPurpose.INDICES) { + return true; + } + final int maxAttempts = blobStore.getMaxRetries() + 1; + return attempt < maxAttempts + failuresAfterMeaningfulProgress; + } + + private int maxRetriesForNoMeaningfulProgress() { + return purpose == OperationPurpose.INDICES ? Integer.MAX_VALUE : (blobStore.getMaxRetries() + 1); + } + + private void delayBeforeRetry(long delayInMillis) { + try { + assert shouldRetry(attempt - 1) : "should not have retried"; + Thread.sleep(delayInMillis); + } catch (InterruptedException e) { + logger.info("s3 input stream delay interrupted", e); + Thread.currentThread().interrupt(); + } + } + + // protected access for testing + protected long getRetryDelayInMillis() { + // Initial delay is 10 ms and cap max delay at 10 * 1024 millis, i.e. 
it retries every ~10 seconds at a minimum + return 10L << (Math.min(attempt - 1, 10)); } @Override @@ -247,7 +333,7 @@ public void reset() { } private T addSuppressedExceptions(T e) { - for (IOException failure : failures) { + for (Exception failure : failures) { e.addSuppressed(failure); } return e; diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index 195a18891ebd0..fc58482651fa3 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -28,6 +28,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.Strings; @@ -37,6 +38,9 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; +import org.elasticsearch.watcher.FileChangesListener; +import org.elasticsearch.watcher.FileWatcher; +import org.elasticsearch.watcher.ResourceWatcherService; import java.io.Closeable; import java.io.IOException; @@ -68,7 +72,6 @@ class S3Service implements Closeable { TimeValue.timeValueHours(24), Setting.Property.NodeScope ); - private volatile Map clientsCache = emptyMap(); /** @@ -90,12 +93,13 @@ class S3Service implements Closeable { final TimeValue compareAndExchangeTimeToLive; final TimeValue compareAndExchangeAntiContentionDelay; - S3Service(Environment environment, Settings nodeSettings) { + S3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { webIdentityTokenCredentialsProvider = new CustomWebIdentityTokenCredentialsProvider( environment, 
System::getenv, System::getProperty, - Clock.systemUTC() + Clock.systemUTC(), + resourceWatcherService ); compareAndExchangeTimeToLive = REPOSITORY_S3_CAS_TTL_SETTING.get(nodeSettings); compareAndExchangeAntiContentionDelay = REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.get(nodeSettings); @@ -178,6 +182,11 @@ S3ClientSettings settings(RepositoryMetadata repositoryMetadata) { // proxy for testing AmazonS3 buildClient(final S3ClientSettings clientSettings) { + final AmazonS3ClientBuilder builder = buildClientBuilder(clientSettings); + return SocketAccess.doPrivileged(builder::build); + } + + protected AmazonS3ClientBuilder buildClientBuilder(S3ClientSettings clientSettings) { final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); builder.withCredentials(buildCredentials(LOGGER, clientSettings, webIdentityTokenCredentialsProvider)); builder.withClientConfiguration(buildConfiguration(clientSettings)); @@ -206,7 +215,7 @@ AmazonS3 buildClient(final S3ClientSettings clientSettings) { if (clientSettings.disableChunkedEncoding) { builder.disableChunkedEncoding(); } - return SocketAccess.doPrivileged(builder::build); + return builder; } // pkg private for tests @@ -328,7 +337,8 @@ static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials Environment environment, SystemEnvironment systemEnvironment, JvmEnvironment jvmEnvironment, - Clock clock + Clock clock, + ResourceWatcherService resourceWatcherService ) { // Check whether the original environment variable exists. 
If it doesn't, // the system doesn't support AWS web identity tokens @@ -390,6 +400,31 @@ static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials roleSessionName, webIdentityTokenFileSymlink.toString() ).withStsClient(stsClient).build(); + var watcher = new FileWatcher(webIdentityTokenFileSymlink); + watcher.addListener(new FileChangesListener() { + + @Override + public void onFileCreated(Path file) { + onFileChanged(file); + } + + @Override + public void onFileChanged(Path file) { + if (file.equals(webIdentityTokenFileSymlink)) { + LOGGER.debug("WS web identity token file [{}] changed, updating credentials", file); + credentialsProvider.refresh(); + } + } + }); + try { + resourceWatcherService.add(watcher, ResourceWatcherService.Frequency.LOW); + } catch (IOException e) { + throw new ElasticsearchException( + "failed to start watching AWS web identity token file [{}]", + e, + webIdentityTokenFileSymlink + ); + } } catch (Exception e) { stsClient.shutdown(); throw e; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java index cecb0cd147897..fb775ab31c04d 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java @@ -9,16 +9,21 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; import com.sun.net.httpserver.HttpServer; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.env.Environment; import org.elasticsearch.mocksocket.MockHttpServer; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.junit.After; import org.junit.Assert; import org.mockito.Mockito; @@ -36,12 +41,23 @@ import java.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.function.Consumer; import java.util.stream.Collectors; public class CustomWebIdentityTokenCredentialsProviderTests extends ESTestCase { private static final String ROLE_ARN = "arn:aws:iam::123456789012:role/FederatedWebIdentityRole"; private static final String ROLE_NAME = "aws-sdk-java-1651084775908"; + private final TestThreadPool threadPool = new TestThreadPool("test"); + private final Settings settings = Settings.builder().put("resource.reload.interval.low", TimeValue.timeValueMillis(100)).build(); + private final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool); + + @After + public void shutdown() throws Exception { + resourceWatcherService.close(); + threadPool.shutdown(); + } private static Environment getEnvironment() throws IOException { Path configDirectory = createTempDir("web-identity-token-test"); @@ -53,7 +69,7 @@ private static Environment getEnvironment() throws IOException { } @SuppressForbidden(reason = "HTTP server is used for testing") - public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { + private static HttpServer getHttpServer(Consumer webIdentityTokenCheck) throws IOException { HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0); httpServer.createContext("/", exchange -> { try (exchange) { @@ -62,6 +78,7 @@ public void testCreateWebIdentityTokenCredentialsProvider() 
throws Exception { .map(e -> e.split("=")) .collect(Collectors.toMap(e -> e[0], e -> URLDecoder.decode(e[1], StandardCharsets.UTF_8))); assertEquals(ROLE_NAME, params.get("RoleSessionName")); + webIdentityTokenCheck.accept(params.get("WebIdentityToken")); exchange.getResponseHeaders().add("Content-Type", "text/xml; charset=UTF-8"); byte[] response = Strings.format( @@ -97,25 +114,41 @@ public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { } }); httpServer.start(); + return httpServer; + } - Environment environment = getEnvironment(); - - // No region is set, but the SDK shouldn't fail because of that - Map environmentVariables = Map.of( - "AWS_WEB_IDENTITY_TOKEN_FILE", - "/var/run/secrets/eks.amazonaws.com/serviceaccount/token", - "AWS_ROLE_ARN", - ROLE_ARN - ); - Map systemProperties = Map.of( + @SuppressForbidden(reason = "HTTP server is used for testing") + private static Map getSystemProperties(HttpServer httpServer) { + return Map.of( "com.amazonaws.sdk.stsMetadataServiceEndpointOverride", "http://" + httpServer.getAddress().getHostName() + ":" + httpServer.getAddress().getPort() ); + } + + private static Map environmentVariables() { + return Map.of("AWS_WEB_IDENTITY_TOKEN_FILE", "/var/run/secrets/eks.amazonaws.com/serviceaccount/token", "AWS_ROLE_ARN", ROLE_ARN); + } + + private static void assertCredentials(AWSCredentials credentials) { + Assert.assertFalse(credentials.getAWSAccessKeyId().isEmpty()); + Assert.assertFalse(credentials.getAWSSecretKey().isEmpty()); + } + + @SuppressForbidden(reason = "HTTP server is used for testing") + public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { + HttpServer httpServer = getHttpServer(s -> assertEquals("YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl", s)); + + Environment environment = getEnvironment(); + + // No region is set, but the SDK shouldn't fail because of that + Map environmentVariables = environmentVariables(); + Map systemProperties = 
getSystemProperties(httpServer); var webIdentityTokenCredentialsProvider = new S3Service.CustomWebIdentityTokenCredentialsProvider( environment, environmentVariables::get, systemProperties::getOrDefault, - Clock.fixed(Instant.ofEpochMilli(1651084775908L), ZoneOffset.UTC) + Clock.fixed(Instant.ofEpochMilli(1651084775908L), ZoneOffset.UTC), + resourceWatcherService ); try { AWSCredentials credentials = S3Service.buildCredentials( @@ -124,8 +157,64 @@ public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { webIdentityTokenCredentialsProvider ).getCredentials(); - Assert.assertEquals("sts_access_key", credentials.getAWSAccessKeyId()); - Assert.assertEquals("secret_access_key", credentials.getAWSSecretKey()); + assertCredentials(credentials); + } finally { + webIdentityTokenCredentialsProvider.shutdown(); + httpServer.stop(0); + } + } + + private static class DelegatingConsumer implements Consumer { + private Consumer delegate; + + private DelegatingConsumer(Consumer delegate) { + this.delegate = delegate; + } + + private void setDelegate(Consumer delegate) { + this.delegate = delegate; + } + + @Override + public void accept(String s) { + delegate.accept(s); + } + } + + @SuppressForbidden(reason = "HTTP server is used for testing") + public void testPickUpNewWebIdentityTokenWhenItsChanged() throws Exception { + DelegatingConsumer webIdentityTokenCheck = new DelegatingConsumer(s -> assertEquals("YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl", s)); + + HttpServer httpServer = getHttpServer(webIdentityTokenCheck); + Environment environment = getEnvironment(); + Map environmentVariables = environmentVariables(); + Map systemProperties = getSystemProperties(httpServer); + var webIdentityTokenCredentialsProvider = new S3Service.CustomWebIdentityTokenCredentialsProvider( + environment, + environmentVariables::get, + systemProperties::getOrDefault, + Clock.fixed(Instant.ofEpochMilli(1651084775908L), ZoneOffset.UTC), + resourceWatcherService + ); + try { + 
AWSCredentialsProvider awsCredentialsProvider = S3Service.buildCredentials( + LogManager.getLogger(S3Service.class), + S3ClientSettings.getClientSettings(Settings.EMPTY, randomAlphaOfLength(8)), + webIdentityTokenCredentialsProvider + ); + assertCredentials(awsCredentialsProvider.getCredentials()); + + var latch = new CountDownLatch(1); + String newWebIdentityToken = "88f84342080d4671a511e10ae905b2b0"; + webIdentityTokenCheck.setDelegate(s -> { + if (s.equals(newWebIdentityToken)) { + latch.countDown(); + } + }); + Files.writeString(environment.configFile().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); + + safeAwait(latch); + assertCredentials(awsCredentialsProvider.getCredentials()); } finally { webIdentityTokenCredentialsProvider.shutdown(); httpServer.stop(0); @@ -149,7 +238,8 @@ public void testSupportRegionalizedEndpoints() throws Exception { getEnvironment(), environmentVariables::get, systemProperties::getOrDefault, - Clock.systemUTC() + Clock.systemUTC(), + resourceWatcherService ); // We can't verify that webIdentityTokenCredentialsProvider's STS client uses the "https://sts.us-west-2.amazonaws.com" // endpoint in a unit test. 
The client depends on hardcoded RegionalEndpointsOptionResolver that in turn depends diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 085d438618a19..28a48c2968f59 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestGetRepositoriesAction; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.security.AccessController; @@ -274,8 +275,8 @@ protected void assertSnapshotOrGenericThread() { } @Override - S3Service s3Service(Environment environment, Settings nodeSettings) { - return new ProxyS3Service(environment, nodeSettings); + S3Service s3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { + return new ProxyS3Service(environment, nodeSettings, resourceWatcherService); } public static final class ClientAndCredentials extends AmazonS3Wrapper { @@ -291,8 +292,8 @@ public static final class ProxyS3Service extends S3Service { private static final Logger logger = LogManager.getLogger(ProxyS3Service.class); - ProxyS3Service(Environment environment, Settings nodeSettings) { - super(environment, nodeSettings); + ProxyS3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { + super(environment, nodeSettings, resourceWatcherService); } @Override diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java 
b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 8f273bcad3cf5..04a836997e0f7 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -7,7 +7,9 @@ */ package org.elasticsearch.repositories.s3; +import com.amazonaws.DnsResolver; import com.amazonaws.SdkClientException; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.internal.MD5DigestCalculatingInputStream; import com.amazonaws.util.Base16; import com.sun.net.httpserver.HttpExchange; @@ -39,6 +41,7 @@ import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.blobstore.AbstractBlobContainerRetriesTestCase; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; +import org.elasticsearch.watcher.ResourceWatcherService; import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; @@ -50,10 +53,14 @@ import java.io.InputStream; import java.net.InetSocketAddress; import java.net.SocketTimeoutException; +import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; +import java.nio.file.NoSuchFileException; import java.util.Arrays; import java.util.Locale; import java.util.Objects; +import java.util.OptionalInt; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; @@ -67,6 +74,7 @@ import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -80,10 +88,25 @@ public class S3BlobContainerRetriesTests 
extends AbstractBlobContainerRetriesTestCase { private S3Service service; + private AtomicBoolean shouldErrorOnDns; @Before public void setUp() throws Exception { - service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY); + shouldErrorOnDns = new AtomicBoolean(false); + service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY, Mockito.mock(ResourceWatcherService.class)) { + @Override + protected AmazonS3ClientBuilder buildClientBuilder(S3ClientSettings clientSettings) { + final AmazonS3ClientBuilder builder = super.buildClientBuilder(clientSettings); + final DnsResolver defaultDnsResolver = builder.getClientConfiguration().getDnsResolver(); + builder.getClientConfiguration().setDnsResolver(host -> { + if (shouldErrorOnDns.get() && randomBoolean() && randomBoolean()) { + throw new UnknownHostException(host); + } + return defaultDnsResolver.resolve(host); + }); + return builder; + } + }; super.setUp(); } @@ -150,29 +173,51 @@ protected BlobContainer createBlobContainer( Settings.builder().put(S3Repository.CLIENT_NAME.getKey(), clientName).build() ); - return new S3BlobContainer( - randomBoolean() ? BlobPath.EMPTY : BlobPath.EMPTY.add("foo"), - new S3BlobStore( - service, - "bucket", - S3Repository.SERVER_SIDE_ENCRYPTION_SETTING.getDefault(Settings.EMPTY), - bufferSize == null ? S3Repository.BUFFER_SIZE_SETTING.getDefault(Settings.EMPTY) : bufferSize, - S3Repository.CANNED_ACL_SETTING.getDefault(Settings.EMPTY), - S3Repository.STORAGE_CLASS_SETTING.getDefault(Settings.EMPTY), - repositoryMetadata, - BigArrays.NON_RECYCLING_INSTANCE, - new DeterministicTaskQueue().getThreadPool(), - RepositoriesMetrics.NOOP - ) - ) { + final S3BlobStore s3BlobStore = new S3BlobStore( + service, + "bucket", + S3Repository.SERVER_SIDE_ENCRYPTION_SETTING.getDefault(Settings.EMPTY), + bufferSize == null ? 
S3Repository.BUFFER_SIZE_SETTING.getDefault(Settings.EMPTY) : bufferSize, + S3Repository.CANNED_ACL_SETTING.getDefault(Settings.EMPTY), + S3Repository.STORAGE_CLASS_SETTING.getDefault(Settings.EMPTY), + repositoryMetadata, + BigArrays.NON_RECYCLING_INSTANCE, + new DeterministicTaskQueue().getThreadPool(), + RepositoriesMetrics.NOOP + ); + return new S3BlobContainer(randomBoolean() ? BlobPath.EMPTY : BlobPath.EMPTY.add("foo"), s3BlobStore) { @Override public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { - return new AssertingInputStream(super.readBlob(purpose, blobName), blobName); + return new AssertingInputStream(new S3RetryingInputStream(purpose, s3BlobStore, buildKey(blobName)) { + @Override + protected long getRetryDelayInMillis() { + assert super.getRetryDelayInMillis() > 0; + return 0; + } + }, blobName); } @Override public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { - return new AssertingInputStream(super.readBlob(purpose, blobName, position, length), blobName, position, length); + final InputStream inputStream; + if (length == 0) { + inputStream = new ByteArrayInputStream(new byte[0]); + } else { + inputStream = new S3RetryingInputStream( + purpose, + s3BlobStore, + buildKey(blobName), + position, + Math.addExact(position, length - 1) + ) { + @Override + protected long getRetryDelayInMillis() { + assert super.getRetryDelayInMillis() > 0; + return 0; + } + }; + } + return new AssertingInputStream(inputStream, blobName, position, length); } }; } @@ -574,6 +619,118 @@ public void handle(HttpExchange exchange) throws IOException { }); } + public void testReadWithIndicesPurposeRetriesForever() throws IOException { + final int maxRetries = between(0, 5); + final int totalFailures = Math.max(30, maxRetries * between(30, 80)); + final int bufferSizeBytes = scaledRandomIntBetween( + 0, + randomFrom(1000, 
Math.toIntExact(S3Repository.BUFFER_SIZE_SETTING.get(Settings.EMPTY).getBytes())) + ); + final BlobContainer blobContainer = createBlobContainer(maxRetries, null, true, ByteSizeValue.ofBytes(bufferSizeBytes)); + final int meaningfulProgressBytes = Math.max(1, bufferSizeBytes / 100); + + final byte[] bytes = randomBlobContent(512); + + shouldErrorOnDns.set(true); + final AtomicInteger failures = new AtomicInteger(); + @SuppressForbidden(reason = "use a http server") + class FlakyReadHandler implements HttpHandler { + + @Override + public void handle(HttpExchange exchange) throws IOException { + Streams.readFully(exchange.getRequestBody()); + if (failures.get() > totalFailures && randomBoolean()) { + final int rangeStart = getRangeStart(exchange); + assertThat(rangeStart, lessThan(bytes.length)); + exchange.getResponseHeaders().add("Content-Type", bytesContentType()); + final OptionalInt rangeEnd = getRangeEnd(exchange); + final int length; + if (rangeEnd.isPresent() == false) { + final var remainderLength = bytes.length - rangeStart; + exchange.sendResponseHeaders(HttpStatus.SC_OK, remainderLength); + length = remainderLength < meaningfulProgressBytes + ? 
remainderLength + : between(meaningfulProgressBytes, remainderLength); + } else { + final int effectiveRangeEnd = Math.min(bytes.length - 1, rangeEnd.getAsInt()); + length = (effectiveRangeEnd - rangeStart) + 1; + exchange.sendResponseHeaders(HttpStatus.SC_OK, length); + } + exchange.getResponseBody().write(bytes, rangeStart, length); + } else { + failures.incrementAndGet(); + if (randomBoolean()) { + exchange.sendResponseHeaders( + randomFrom( + HttpStatus.SC_INTERNAL_SERVER_ERROR, + HttpStatus.SC_BAD_GATEWAY, + HttpStatus.SC_SERVICE_UNAVAILABLE, + HttpStatus.SC_GATEWAY_TIMEOUT + ), + -1 + ); + } else { + if (randomBoolean()) { + final var bytesSent = sendIncompleteContent(exchange, bytes); + if (bytesSent >= meaningfulProgressBytes) { + exchange.getResponseBody().flush(); + } + } + } + } + exchange.close(); + } + } + + httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_retries_forever"), new FlakyReadHandler()); + + // Ranged read + final int position = between(0, bytes.length - 1); + final int length = between(0, randomBoolean() ? 
bytes.length : Integer.MAX_VALUE); + logger.info("--> position={}, length={}", position, length); + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.INDICES, "read_blob_retries_forever", position, length)) { + final byte[] bytesRead = BytesReference.toBytes(Streams.readFully(inputStream)); + assertArrayEquals(Arrays.copyOfRange(bytes, position, Math.min(bytes.length, position + length)), bytesRead); + } + assertThat(failures.get(), greaterThan(totalFailures)); + + // Read the whole blob + failures.set(0); + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.INDICES, "read_blob_retries_forever")) { + final byte[] bytesRead = BytesReference.toBytes(Streams.readFully(inputStream)); + assertArrayEquals(bytes, bytesRead); + } + assertThat(failures.get(), greaterThan(totalFailures)); + } + + public void testDoesNotRetryOnNotFound() { + final int maxRetries = between(3, 5); + final BlobContainer blobContainer = createBlobContainer(maxRetries, null, true, null); + + final AtomicInteger numberOfReads = new AtomicInteger(0); + @SuppressForbidden(reason = "use a http server") + class NotFoundReadHandler implements HttpHandler { + @Override + public void handle(HttpExchange exchange) throws IOException { + numberOfReads.incrementAndGet(); + exchange.sendResponseHeaders(HttpStatus.SC_NOT_FOUND, -1); + exchange.close(); + } + } + + httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_not_found"), new NotFoundReadHandler()); + expectThrows(NoSuchFileException.class, () -> { + try ( + InputStream inputStream = randomBoolean() + ? 
blobContainer.readBlob(randomRetryingPurpose(), "read_blob_not_found") + : blobContainer.readBlob(randomRetryingPurpose(), "read_blob_not_found", between(0, 100), between(1, 100)) + ) { + Streams.readFully(inputStream); + } + }); + assertThat(numberOfReads.get(), equalTo(1)); + } + @Override protected Matcher getMaxRetriesMatcher(int maxRetries) { // some attempts make meaningful progress and do not count towards the max retry limit @@ -585,6 +742,14 @@ protected OperationPurpose randomRetryingPurpose() { return randomValueOtherThan(OperationPurpose.REPOSITORY_ANALYSIS, BlobStoreTestUtil::randomPurpose); } + @Override + protected OperationPurpose randomFiniteRetryingPurpose() { + return randomValueOtherThanMany( + purpose -> purpose == OperationPurpose.REPOSITORY_ANALYSIS || purpose == OperationPurpose.INDICES, + BlobStoreTestUtil::randomPurpose + ); + } + /** * Asserts that an InputStream is fully consumed, or aborted, when it is closed */ @@ -605,6 +770,8 @@ private static class AssertingInputStream extends FilterInputStream { AssertingInputStream(InputStream in, String blobName, long position, long length) { super(in); + assert position >= 0L; + assert length >= 0; this.blobName = blobName; this.position = position; this.length = length; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java index c48e0dc337d30..31bfd3a5e157f 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.watcher.ResourceWatcherService; import org.mockito.Mockito; import java.io.IOException; @@ -178,7 
+179,7 @@ public void testRegionCanBeSet() throws IOException { ); assertThat(settings.get("default").region, is("")); assertThat(settings.get("other").region, is(region)); - try (S3Service s3Service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY)) { + try (var s3Service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY, Mockito.mock(ResourceWatcherService.class))) { AmazonS3Client other = (AmazonS3Client) s3Service.buildClient(settings.get("other")); assertThat(other.getSignerRegionOverride(), is(region)); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index ab5edc4608bfd..0a92ed0a28973 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.hamcrest.Matchers; import org.mockito.Mockito; @@ -45,8 +46,8 @@ public void shutdown() { private static class DummyS3Service extends S3Service { - DummyS3Service(Environment environment) { - super(environment, Settings.EMPTY); + DummyS3Service(Environment environment, ResourceWatcherService resourceWatcherService) { + super(environment, Settings.EMPTY, resourceWatcherService); } @Override @@ -125,7 +126,7 @@ private S3Repository createS3Repo(RepositoryMetadata metadata) { return new S3Repository( metadata, NamedXContentRegistry.EMPTY, - new DummyS3Service(Mockito.mock(Environment.class)), + new DummyS3Service(Mockito.mock(Environment.class), Mockito.mock(ResourceWatcherService.class)), 
BlobStoreTestUtil.mockClusterService(), MockBigArrays.NON_RECYCLING_INSTANCE, new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java index bbdeea6d87631..33e56bcf2180b 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.watcher.ResourceWatcherService; import org.mockito.Mockito; import java.io.IOException; @@ -18,7 +19,11 @@ public class S3ServiceTests extends ESTestCase { public void testCachedClientsAreReleased() throws IOException { - final S3Service s3Service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY); + final S3Service s3Service = new S3Service( + Mockito.mock(Environment.class), + Settings.EMPTY, + Mockito.mock(ResourceWatcherService.class) + ); final Settings settings = Settings.builder().put("endpoint", "http://first").build(); final RepositoryMetadata metadata1 = new RepositoryMetadata("first", "s3", settings); final RepositoryMetadata metadata2 = new RepositoryMetadata("second", "s3", settings); diff --git a/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java b/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java index 0bd3ad462ef70..ec0ca43c84b49 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java +++ 
b/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java @@ -8,7 +8,6 @@ package org.elasticsearch.plugin.repository.url; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.blobstore.url.http.URLHttpClient; import org.elasticsearch.common.settings.Setting; @@ -25,13 +24,13 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; public class URLRepositoryPlugin extends Plugin implements RepositoryPlugin { - private final SetOnce httpClientFactory = new SetOnce<>(); + private final AtomicReference httpClientFactory = new AtomicReference<>(); @Override public List> getSettings() { @@ -51,27 +50,18 @@ public Map getRepositories( RecoverySettings recoverySettings, RepositoriesMetrics repositoriesMetrics ) { - return Collections.singletonMap(URLRepository.TYPE, metadata -> { - assert httpClientFactory.get() != null : "Expected to get a configured http client factory"; - return new URLRepository( + return Collections.singletonMap( + URLRepository.TYPE, + metadata -> new URLRepository( metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings, - httpClientFactory.get() - ); - }); - } - - @Override - public Collection createComponents(PluginServices services) { - - final URLHttpClient.Factory apacheURLHttpClientFactory = new URLHttpClient.Factory(); - - httpClientFactory.set(apacheURLHttpClientFactory); - return List.of(apacheURLHttpClientFactory); + httpClientFactory.updateAndGet(factory -> factory == null ? 
new URLHttpClient.Factory() : factory) + ) + ); } @Override diff --git a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java index 62063ddab9129..ad7b821c986c1 100644 --- a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java +++ b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainRestPlugin.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -27,11 +28,12 @@ public class MainRestPlugin extends Plugin implements ActionPlugin { - public static final ActionType MAIN_ACTION = ActionType.localOnly("cluster:monitor/main"); + public static final ActionType MAIN_ACTION = new ActionType<>("cluster:monitor/main"); @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedEncodingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedEncodingIT.java new file mode 100644 index 0000000000000..8cd68abdcce42 --- /dev/null +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedEncodingIT.java @@ -0,0 +1,174 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.http.netty4; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.recycler.Recycler; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.ChunkedRestResponseBody; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.function.Supplier; 
+ +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestResponse.TEXT_CONTENT_TYPE; +import static org.hamcrest.Matchers.containsString; + +public class Netty4ChunkedEncodingIT extends ESNetty4IntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.concatLists(List.of(YieldsChunksPlugin.class), super.nodePlugins()); + } + + @Override + protected boolean addMockHttpTransport() { + return false; // enable http + } + + private static final String EXPECTED_NONEMPTY_BODY = """ + chunk-0 + chunk-1 + chunk-2 + """; + + public void testNonemptyResponse() throws IOException { + getAndCheckBodyContents(YieldsChunksPlugin.CHUNKS_ROUTE, EXPECTED_NONEMPTY_BODY); + } + + public void testEmptyResponse() throws IOException { + getAndCheckBodyContents(YieldsChunksPlugin.EMPTY_ROUTE, ""); + } + + private static void getAndCheckBodyContents(String route, String expectedBody) throws IOException { + final var response = getRestClient().performRequest(new Request("GET", route)); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(response.getEntity().getContentType().toString(), containsString(TEXT_CONTENT_TYPE)); + if (Strings.hasLength(expectedBody)) { + assertTrue(response.getEntity().isChunked()); + } // else we might have no chunks to send which doesn't need chunked-encoding + final String body; + try (var reader = new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8)) { + body = Streams.copyToString(reader); + } + assertEquals(expectedBody, body); + } + + public static class YieldsChunksPlugin extends Plugin implements ActionPlugin { + static final String CHUNKS_ROUTE = "/_test/yields_chunks"; + static final String EMPTY_ROUTE = "/_test/yields_only_empty_chunks"; + + private static Iterator emptyChunks() { + return Iterators.forRange(0, between(0, 2), i -> BytesArray.EMPTY); + } + + @Override + public Collection getRestHandlers( + Settings 
settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return CHUNKS_ROUTE; + } + + @Override + public List routes() { + return List.of(new Route(GET, CHUNKS_ROUTE)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + return channel -> sendChunksResponse( + channel, + Iterators.concat( + emptyChunks(), + Iterators.flatMap( + Iterators.forRange(0, 3, i -> "chunk-" + i + '\n'), + chunk -> Iterators.concat(Iterators.single(new BytesArray(chunk)), emptyChunks()) + ) + ) + ); + } + }, new BaseRestHandler() { + @Override + public String getName() { + return EMPTY_ROUTE; + } + + @Override + public List routes() { + return List.of(new Route(GET, EMPTY_ROUTE)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + return channel -> sendChunksResponse(channel, emptyChunks()); + } + }); + } + + private static void sendChunksResponse(RestChannel channel, Iterator chunkIterator) { + channel.sendResponse(RestResponse.chunked(RestStatus.OK, new ChunkedRestResponseBody() { + @Override + public boolean isDone() { + return chunkIterator.hasNext() == false; + } + + @Override + public ReleasableBytesReference encodeChunk(int sizeHint, Recycler recycler) { + final var page = recycler.obtain(); // just to ensure nothing is leaked + return new ReleasableBytesReference(chunkIterator.next(), page); + } + + @Override + public String getResponseContentTypeString() { + return TEXT_CONTENT_TYPE; + } + }, null)); + } + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4ChunkedHttpResponse.java 
b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4ChunkedHttpResponse.java index cbcb807f335dc..f5f32bf333779 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4ChunkedHttpResponse.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4ChunkedHttpResponse.java @@ -12,13 +12,14 @@ import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpVersion; +import org.elasticsearch.http.HttpResponse; import org.elasticsearch.rest.ChunkedRestResponseBody; import org.elasticsearch.rest.RestStatus; /** * A http response that will be transferred via chunked encoding when handled by {@link Netty4HttpPipeliningHandler}. */ -public final class Netty4ChunkedHttpResponse extends DefaultHttpResponse implements Netty4RestResponse { +public final class Netty4ChunkedHttpResponse extends DefaultHttpResponse implements Netty4HttpResponse, HttpResponse { private final int sequence; @@ -38,4 +39,14 @@ public ChunkedRestResponseBody body() { public int getSequence() { return sequence; } + + @Override + public void addHeader(String name, String value) { + headers().add(name, value); + } + + @Override + public boolean containsHeader(String name) { + return headers().contains(name); + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4FullHttpResponse.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4FullHttpResponse.java new file mode 100644 index 0000000000000..a350427c75ec5 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4FullHttpResponse.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpVersion; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.http.HttpResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.netty4.Netty4Utils; + +public final class Netty4FullHttpResponse extends DefaultFullHttpResponse implements Netty4HttpResponse, HttpResponse { + + private final int sequence; + + Netty4FullHttpResponse(int sequence, HttpVersion version, RestStatus status, BytesReference content) { + super(version, HttpResponseStatus.valueOf(status.getStatus()), Netty4Utils.toByteBuf(content)); + this.sequence = sequence; + } + + @Override + public int getSequence() { + return sequence; + } + + @Override + public void addHeader(String name, String value) { + headers().add(name, value); + } + + @Override + public boolean containsHeader(String name) { + return headers().contains(name); + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java index f5a32a0ec768c..b2947b32ebdde 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java @@ -25,6 +25,7 @@ import io.netty.util.concurrent.Future; import io.netty.util.concurrent.PromiseCombiner; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import 
org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.ReleasableBytesReference; @@ -52,12 +53,12 @@ */ public class Netty4HttpPipeliningHandler extends ChannelDuplexHandler { - private final Logger logger; + private static final Logger logger = LogManager.getLogger(Netty4HttpPipeliningHandler.class); private final int maxEventsHeld; - private final PriorityQueue> outboundHoldingQueue; + private final PriorityQueue> outboundHoldingQueue; - private record ChunkedWrite(PromiseCombiner combiner, ChannelPromise onDone, Netty4ChunkedHttpResponse response) {} + private record ChunkedWrite(PromiseCombiner combiner, ChannelPromise onDone, ChunkedRestResponseBody responseBody) {} /** * The current {@link ChunkedWrite} if a chunked write is executed at the moment. @@ -86,12 +87,10 @@ private record ChunkedWrite(PromiseCombiner combiner, ChannelPromise onDone, Net /** * Construct a new pipelining handler; this handler should be used downstream of HTTP decoding/aggregation. * - * @param logger for logging unexpected errors * @param maxEventsHeld the maximum number of channel events that will be retained prior to aborting the channel connection; this is * required as events cannot queue up indefinitely */ - public Netty4HttpPipeliningHandler(Logger logger, final int maxEventsHeld, final Netty4HttpServerTransport serverTransport) { - this.logger = logger; + public Netty4HttpPipeliningHandler(final int maxEventsHeld, final Netty4HttpServerTransport serverTransport) { this.maxEventsHeld = maxEventsHeld; this.outboundHoldingQueue = new PriorityQueue<>(1, Comparator.comparingInt(t -> t.v1().getSequence())); this.serverTransport = serverTransport; @@ -136,10 +135,10 @@ protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpReque @Override public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws IOException { - assert msg instanceof Netty4RestResponse : "Invalid message type: " + msg.getClass(); + 
assert msg instanceof Netty4HttpResponse : "Invalid message type: " + msg.getClass(); boolean success = false; try { - final Netty4RestResponse restResponse = (Netty4RestResponse) msg; + final Netty4HttpResponse restResponse = (Netty4HttpResponse) msg; if (restResponse.getSequence() != writeSequence) { assert restResponse.getSequence() > writeSequence : "response sequence [" + restResponse.getSequence() + "] we below write sequence [" + writeSequence + "]"; @@ -150,6 +149,8 @@ public void write(final ChannelHandlerContext ctx, final Object msg, final Chann ); } // response is not at the current sequence number so we add it to the outbound queue and return + assert outboundHoldingQueue.stream().noneMatch(t -> t.v1().getSequence() == writeSequence) + : "duplicate outbound entries for seqno " + writeSequence; outboundHoldingQueue.add(new Tuple<>(restResponse, promise)); success = true; return; @@ -172,7 +173,7 @@ public void write(final ChannelHandlerContext ctx, final Object msg, final Chann private void doWriteQueued(ChannelHandlerContext ctx) throws IOException { while (outboundHoldingQueue.isEmpty() == false && outboundHoldingQueue.peek().v1().getSequence() == writeSequence) { - final Tuple top = outboundHoldingQueue.poll(); + final Tuple top = outboundHoldingQueue.poll(); assert top != null : "we know the outbound holding queue to not be empty at this point"; doWrite(ctx, top.v1(), top.v2()); } @@ -189,19 +190,24 @@ private void doWriteQueued(ChannelHandlerContext ctx) throws IOException { SPLIT_THRESHOLD = (int) (NettyAllocator.suggestedMaxAllocationSize() * 0.99); } - private void doWrite(ChannelHandlerContext ctx, Netty4RestResponse readyResponse, ChannelPromise promise) throws IOException { + private void doWrite(ChannelHandlerContext ctx, Netty4HttpResponse readyResponse, ChannelPromise promise) throws IOException { assert currentChunkedWrite == null : "unexpected existing write [" + currentChunkedWrite + "]"; - if (readyResponse instanceof 
Netty4HttpResponse) { - doWrite(ctx, (Netty4HttpResponse) readyResponse, promise); + assert readyResponse != null : "cannot write null response"; + assert readyResponse.getSequence() == writeSequence; + if (readyResponse instanceof Netty4FullHttpResponse fullResponse) { + doWriteFullResponse(ctx, fullResponse, promise); + } else if (readyResponse instanceof Netty4ChunkedHttpResponse chunkedResponse) { + doWriteChunkedResponse(ctx, chunkedResponse, promise); } else { - doWrite(ctx, (Netty4ChunkedHttpResponse) readyResponse, promise); + assert false : readyResponse.getClass().getCanonicalName(); + throw new IllegalStateException("illegal message type: " + readyResponse.getClass().getCanonicalName()); } } /** * Split up large responses to prevent batch compression {@link JdkZlibEncoder} down the pipeline. */ - private void doWrite(ChannelHandlerContext ctx, Netty4HttpResponse readyResponse, ChannelPromise promise) { + private void doWriteFullResponse(ChannelHandlerContext ctx, Netty4FullHttpResponse readyResponse, ChannelPromise promise) { if (DO_NOT_SPLIT_HTTP_RESPONSES || readyResponse.content().readableBytes() <= SPLIT_THRESHOLD) { enqueueWrite(ctx, readyResponse, promise); } else { @@ -210,16 +216,19 @@ private void doWrite(ChannelHandlerContext ctx, Netty4HttpResponse readyResponse writeSequence++; } - private void doWrite(ChannelHandlerContext ctx, Netty4ChunkedHttpResponse readyResponse, ChannelPromise promise) throws IOException { + private void doWriteChunkedResponse(ChannelHandlerContext ctx, Netty4ChunkedHttpResponse readyResponse, ChannelPromise promise) + throws IOException { final PromiseCombiner combiner = new PromiseCombiner(ctx.executor()); final ChannelPromise first = ctx.newPromise(); combiner.add((Future) first); - currentChunkedWrite = new ChunkedWrite(combiner, promise, readyResponse); + final var responseBody = readyResponse.body(); + assert currentChunkedWrite == null; + currentChunkedWrite = new ChunkedWrite(combiner, promise, responseBody); 
if (enqueueWrite(ctx, readyResponse, first)) { // We were able to write out the first chunk directly, try writing out subsequent chunks until the channel becomes unwritable. // NB "writable" means there's space in the downstream ChannelOutboundBuffer, we aren't trying to saturate the physical channel. while (ctx.channel().isWritable()) { - if (writeChunk(ctx, combiner, readyResponse.body())) { + if (writeChunk(ctx, combiner, responseBody)) { finishChunkedWrite(); return; } @@ -228,15 +237,15 @@ private void doWrite(ChannelHandlerContext ctx, Netty4ChunkedHttpResponse readyR } private void finishChunkedWrite() { - try { - currentChunkedWrite.combiner.finish(currentChunkedWrite.onDone); - } finally { - currentChunkedWrite = null; - writeSequence++; - } + assert currentChunkedWrite != null; + assert currentChunkedWrite.responseBody().isDone(); + final var finishingWrite = currentChunkedWrite; + currentChunkedWrite = null; + writeSequence++; + finishingWrite.combiner.finish(finishingWrite.onDone()); } - private void splitAndWrite(ChannelHandlerContext ctx, Netty4HttpResponse msg, ChannelPromise promise) { + private void splitAndWrite(ChannelHandlerContext ctx, Netty4FullHttpResponse msg, ChannelPromise promise) { final PromiseCombiner combiner = new PromiseCombiner(ctx.executor()); HttpResponse response = new DefaultHttpResponse(msg.protocolVersion(), msg.status(), msg.headers()); combiner.add(enqueueWrite(ctx, response)); @@ -293,7 +302,7 @@ private boolean doFlush(ChannelHandlerContext ctx) throws IOException { if (currentWrite == null) { // no bytes were found queued, check if a chunked message might have become writable if (currentChunkedWrite != null) { - if (writeChunk(ctx, currentChunkedWrite.combiner, currentChunkedWrite.response.body())) { + if (writeChunk(ctx, currentChunkedWrite.combiner, currentChunkedWrite.responseBody())) { finishChunkedWrite(); } continue; @@ -315,7 +324,6 @@ private boolean writeChunk(ChannelHandlerContext ctx, PromiseCombiner combiner, 
Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE, serverTransport.recycler() ); - assert bytes.length() > 0 : "serialization should not produce empty buffers"; final ByteBuf content = Netty4Utils.toByteBuf(bytes); final boolean done = body.isDone(); final ChannelFuture f = ctx.write(done ? new DefaultLastHttpContent(content) : new DefaultHttpContent(content)); @@ -337,11 +345,11 @@ public void close(ChannelHandlerContext ctx, ChannelPromise promise) { safeFailPromise(currentChunkedWrite.onDone, new ClosedChannelException()); currentChunkedWrite = null; } - List> inflightResponses = removeAllInflightResponses(); + List> inflightResponses = removeAllInflightResponses(); if (inflightResponses.isEmpty() == false) { ClosedChannelException closedChannelException = new ClosedChannelException(); - for (Tuple inflightResponse : inflightResponses) { + for (Tuple inflightResponse : inflightResponses) { safeFailPromise(inflightResponse.v2(), closedChannelException); } } @@ -386,8 +394,8 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { } } - private List> removeAllInflightResponses() { - ArrayList> responses = new ArrayList<>(outboundHoldingQueue); + private List> removeAllInflightResponses() { + ArrayList> responses = new ArrayList<>(outboundHoldingQueue); outboundHoldingQueue.clear(); return responses; } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java index 37b6794238b81..0e1bb527fed9d 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java @@ -171,8 +171,8 @@ public HttpRequest removeHeader(String header) { } @Override - public Netty4HttpResponse createResponse(RestStatus status, BytesReference contentRef) { - return new Netty4HttpResponse(sequence, 
request.protocolVersion(), status, contentRef); + public Netty4FullHttpResponse createResponse(RestStatus status, BytesReference contentRef) { + return new Netty4FullHttpResponse(sequence, request.protocolVersion(), status, contentRef); } @Override diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpResponse.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpResponse.java index e7a88fac4fc67..3396b13cdab0f 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpResponse.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpResponse.java @@ -8,25 +8,13 @@ package org.elasticsearch.http.netty4; -import io.netty.handler.codec.http.DefaultFullHttpResponse; -import io.netty.handler.codec.http.HttpResponseStatus; -import io.netty.handler.codec.http.HttpVersion; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.transport.netty4.Netty4Utils; - -public class Netty4HttpResponse extends DefaultFullHttpResponse implements Netty4RestResponse { - - private final int sequence; - - Netty4HttpResponse(int sequence, HttpVersion version, RestStatus status, BytesReference content) { - super(version, HttpResponseStatus.valueOf(status.getStatus()), Netty4Utils.toByteBuf(content)); - this.sequence = sequence; - } - - @Override - public int getSequence() { - return sequence; - } +/** + * Super-interface for responses handled by the Netty4 HTTP transport. + */ +public sealed interface Netty4HttpResponse permits Netty4FullHttpResponse, Netty4ChunkedHttpResponse { + /** + * @return The sequence number for the request which corresponds with this response, for making sure that we send responses to pipelined + * requests in the correct order. 
+ */ + int getSequence(); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 8eb1a5789102c..274240a40bd46 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -419,8 +419,8 @@ protected HttpMessage createMessage(String[] initialLine) throws Exception { protected boolean isContentAlwaysEmpty(HttpResponse msg) { // non-chunked responses (Netty4HttpResponse extends Netty's DefaultFullHttpResponse) with chunked transfer // encoding are only sent by us in response to HEAD requests and must always have an empty body - if (msg instanceof Netty4HttpResponse netty4HttpResponse && HttpUtil.isTransferEncodingChunked(msg)) { - assert netty4HttpResponse.content().isReadable() == false; + if (msg instanceof Netty4FullHttpResponse netty4FullHttpResponse && HttpUtil.isTransferEncodingChunked(msg)) { + assert netty4FullHttpResponse.content().isReadable() == false; return true; } return super.isContentAlwaysEmpty(msg); @@ -430,7 +430,7 @@ protected boolean isContentAlwaysEmpty(HttpResponse msg) { if (handlingSettings.compression()) { ch.pipeline().addLast("encoder_compress", new HttpContentCompressor(handlingSettings.compressionLevel())); } - ch.pipeline().addLast("pipelining", new Netty4HttpPipeliningHandler(logger, transport.pipeliningMaxEvents, transport)); + ch.pipeline().addLast("pipelining", new Netty4HttpPipeliningHandler(transport.pipeliningMaxEvents, transport)); transport.serverAcceptedChannel(nettyHttpChannel); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4RestResponse.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4RestResponse.java deleted file mode 100644 
index 1a3ec4bdc4b75..0000000000000 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4RestResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.http.netty4; - -import io.netty.handler.codec.http.HttpMessage; - -import org.elasticsearch.http.HttpResponse; - -public interface Netty4RestResponse extends HttpResponse, HttpMessage { - - int getSequence(); - - @Override - default void addHeader(String name, String value) { - headers().add(name, value); - } - - @Override - default boolean containsHeader(String name) { - return headers().contains(name); - } -} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java index ea999ce0f471d..f5d566d977d09 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java @@ -197,7 +197,7 @@ private static boolean useUnpooled(long heapSizeInBytes, boolean g1gcEnabled, bo if (userForcedUnpooled()) { return true; } else if (userForcedPooled()) { - return true; + return false; } else if (heapSizeInBytes <= 1 << 30) { // If the heap is 1GB or less we use unpooled return true; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index aa6e51cf9fb00..9e0f30caec755 100644 --- 
a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -18,6 +18,8 @@ import io.netty.handler.codec.DecoderResult; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.DefaultHttpContent; +import io.netty.handler.codec.http.DefaultHttpResponse; +import io.netty.handler.codec.http.DefaultLastHttpContent; import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpVersion; @@ -30,12 +32,14 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.common.bytes.ZeroBytesReference; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.http.HttpResponse; import org.elasticsearch.rest.ChunkedRestResponseBody; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.netty4.Netty4Utils; +import org.elasticsearch.transport.netty4.NettyAllocator; import org.junit.After; import java.nio.channels.ClosedChannelException; @@ -56,6 +60,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.sameInstance; import static org.hamcrest.core.Is.is; import static org.mockito.Mockito.mock; @@ -70,18 +75,18 @@ public class Netty4HttpPipeliningHandlerTests extends ESTestCase { @After public void tearDown() throws Exception { waitingRequests.keySet().forEach(this::finishRequest); - // shutdown the Executor Service - if (handlerService.isShutdown() == false) { - handlerService.shutdown(); - 
handlerService.awaitTermination(10, TimeUnit.SECONDS); - } - if (eventLoopService.isShutdown() == false) { - eventLoopService.shutdown(); - eventLoopService.awaitTermination(10, TimeUnit.SECONDS); - } + terminateExecutorService(handlerService); + terminateExecutorService(eventLoopService); super.tearDown(); } + private void terminateExecutorService(ExecutorService executorService) throws InterruptedException { + if (executorService.isShutdown() == false) { + executorService.shutdown(); + assertTrue(executorService.awaitTermination(10, TimeUnit.SECONDS)); + } + } + private CountDownLatch finishRequest(String url) { waitingRequests.get(url).countDown(); return finishingRequests.get(url); @@ -92,7 +97,7 @@ public void testThatPipeliningWorksWithFastSerializedRequests() throws Interrupt final EmbeddedChannel embeddedChannel = makeEmbeddedChannelWithSimulatedWork(numberOfRequests); for (int i = 0; i < numberOfRequests; i++) { - embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); + embeddedChannel.writeInbound(createHttpRequest("/" + i)); } final List latches = new ArrayList<>(); @@ -114,7 +119,7 @@ public void testThatPipeliningWorksWithFastSerializedRequests() throws Interrupt } private EmbeddedChannel makeEmbeddedChannelWithSimulatedWork(int numberOfRequests) { - return new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests, null) { + return new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests, null) { @Override protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { ctx.fireChannelRead(pipelinedRequest); @@ -127,7 +132,7 @@ public void testThatPipeliningWorksWhenSlowRequestsInDifferentOrder() throws Int final EmbeddedChannel embeddedChannel = makeEmbeddedChannelWithSimulatedWork(numberOfRequests); for (int i = 0; i < numberOfRequests; i++) { - embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); + 
embeddedChannel.writeInbound(createHttpRequest("/" + i)); } // random order execution @@ -156,7 +161,7 @@ public void testThatPipeliningClosesConnectionWithTooManyEvents() throws Interru final EmbeddedChannel embeddedChannel = makeEmbeddedChannelWithSimulatedWork(numberOfRequests); for (int i = 0; i < 1 + numberOfRequests + 1; i++) { - embeddedChannel.writeInbound(createHttpRequest("/" + Integer.toString(i))); + embeddedChannel.writeInbound(createHttpRequest("/" + i)); } final List latches = new ArrayList<>(); @@ -178,9 +183,9 @@ public void testThatPipeliningClosesConnectionWithTooManyEvents() throws Interru assertFalse(embeddedChannel.isOpen()); } - public void testPipeliningRequestsAreReleased() throws InterruptedException { + public void testPipeliningRequestsAreReleased() { final int numberOfRequests = 10; - final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests + 1, null)); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests + 1, null)); for (int i = 0; i < numberOfRequests; i++) { embeddedChannel.writeInbound(createHttpRequest("/" + i)); @@ -197,7 +202,7 @@ public void testPipeliningRequestsAreReleased() throws InterruptedException { ChannelPromise promise = embeddedChannel.newPromise(); promises.add(promise); Netty4HttpRequest pipelinedRequest = requests.get(i); - Netty4HttpResponse resp = pipelinedRequest.createResponse(RestStatus.OK, BytesArray.EMPTY); + Netty4FullHttpResponse resp = pipelinedRequest.createResponse(RestStatus.OK, BytesArray.EMPTY); embeddedChannel.writeAndFlush(resp, promise); } @@ -211,6 +216,45 @@ public void testPipeliningRequestsAreReleased() throws InterruptedException { } } + public void testSmallFullResponsesAreSentDirectly() { + final List messagesSeen = new ArrayList<>(); + final var embeddedChannel = new EmbeddedChannel(capturingHandler(messagesSeen), getTestHttpHandler()); + 
embeddedChannel.writeInbound(createHttpRequest("/test")); + final Netty4HttpRequest request = embeddedChannel.readInbound(); + final var maxSize = (int) NettyAllocator.suggestedMaxAllocationSize() / 2; + final var content = new ZeroBytesReference(between(0, maxSize)); + final var response = request.createResponse(RestStatus.OK, content); + assertThat(response, instanceOf(FullHttpResponse.class)); + final var promise = embeddedChannel.newPromise(); + embeddedChannel.writeAndFlush(response, promise); + assertTrue(promise.isDone()); + assertThat(messagesSeen, hasSize(1)); + assertSame(response, messagesSeen.get(0)); + } + + public void testLargeFullResponsesAreSplit() { + final List messagesSeen = new ArrayList<>(); + final var embeddedChannel = new EmbeddedChannel(capturingHandler(messagesSeen), getTestHttpHandler()); + embeddedChannel.writeInbound(createHttpRequest("/test")); + final Netty4HttpRequest request = embeddedChannel.readInbound(); + final var minSize = (int) NettyAllocator.suggestedMaxAllocationSize(); + final var content = new ZeroBytesReference(between(minSize, (int) (minSize * 1.5))); + final var response = request.createResponse(RestStatus.OK, content); + assertThat(response, instanceOf(FullHttpResponse.class)); + final var promise = embeddedChannel.newPromise(); + embeddedChannel.writeAndFlush(response, promise); + assertTrue(promise.isDone()); + assertThat(messagesSeen, hasSize(3)); + final var headersMessage = asInstanceOf(DefaultHttpResponse.class, messagesSeen.get(0)); + assertEquals(RestStatus.OK.getStatus(), headersMessage.status().code()); + assertThat(headersMessage, not(instanceOf(FullHttpResponse.class))); + final var chunk1 = asInstanceOf(DefaultHttpContent.class, messagesSeen.get(1)); + final var chunk2 = asInstanceOf(DefaultLastHttpContent.class, messagesSeen.get(2)); + assertEquals(content.length(), chunk1.content().readableBytes() + chunk2.content().readableBytes()); + assertThat(chunk1, not(instanceOf(FullHttpResponse.class))); + 
assertThat(chunk2, not(instanceOf(FullHttpResponse.class))); + } + public void testDecoderErrorSurfacedAsNettyInboundError() { final EmbeddedChannel embeddedChannel = new EmbeddedChannel(getTestHttpHandler()); // a request with a decoder error @@ -304,7 +348,7 @@ public void testResumesSingleAfterChunkedMessage() { assertTrue(promise1.isDone()); assertThat(messagesSeen, hasSize(chunks1 + 1 + 1)); assertChunkedMessageAtIndex(messagesSeen, 0, chunks1, chunk); - assertThat(messagesSeen.get(chunks1 + 1), instanceOf(Netty4HttpResponse.class)); + assertThat(messagesSeen.get(chunks1 + 1), instanceOf(Netty4FullHttpResponse.class)); assertContentAtIndexEquals(messagesSeen, chunks1 + 1, single); assertTrue(promise2.isDone()); } @@ -339,7 +383,7 @@ public void testChunkedResumesAfterSingleMessage() { embeddedChannel.flush(); assertTrue(promise1.isDone()); assertThat(messagesSeen, hasSize(chunks2 + 2)); - assertThat(messagesSeen.get(0), instanceOf(Netty4HttpResponse.class)); + assertThat(messagesSeen.get(0), instanceOf(Netty4FullHttpResponse.class)); assertChunkedMessageAtIndex(messagesSeen, 1, chunks2, chunk); assertTrue(promise2.isDone()); } @@ -377,7 +421,7 @@ public void testChunkedWithSmallChunksResumesAfterSingleMessage() { embeddedChannel.flush(); assertTrue(promise1.isDone()); assertThat(messagesSeen, hasSize(chunks2 + 2)); - assertThat(messagesSeen.get(0), instanceOf(Netty4HttpResponse.class)); + assertThat(messagesSeen.get(0), instanceOf(Netty4FullHttpResponse.class)); assertChunkedMessageAtIndex(messagesSeen, 1, chunks2, chunk); assertTrue(promise2.isDone()); } @@ -410,7 +454,7 @@ public void testPipeliningRequestsAreReleasedAfterFailureOnChunked() { for (Netty4HttpRequest request : requests) { ChannelPromise promise = embeddedChannel.newPromise(); promises.add(promise); - Netty4HttpResponse resp = request.createResponse(RestStatus.OK, BytesArray.EMPTY); + Netty4FullHttpResponse resp = request.createResponse(RestStatus.OK, BytesArray.EMPTY); 
embeddedChannel.write(resp, promise); } assertFalse(chunkedWritePromise.isDone()); @@ -449,7 +493,7 @@ private static void assertDoneWithClosedChannel(ChannelPromise chunkedWritePromi } private Netty4HttpPipeliningHandler getTestHttpHandler() { - return new Netty4HttpPipeliningHandler(logger, Integer.MAX_VALUE, mock(Netty4HttpServerTransport.class)) { + return new Netty4HttpPipeliningHandler(Integer.MAX_VALUE, mock(Netty4HttpServerTransport.class)) { @Override protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { ctx.fireChannelRead(pipelinedRequest); @@ -478,9 +522,6 @@ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler rec public String getResponseContentTypeString() { return "application/octet-stream"; } - - @Override - public void close() {} }; } @@ -525,7 +566,7 @@ protected void channelRead0(final ChannelHandlerContext ctx, Netty4HttpRequest r handlerService.submit(() -> { try { - waitingLatch.await(1000, TimeUnit.SECONDS); + assertTrue(waitingLatch.await(1000, TimeUnit.SECONDS)); final ChannelPromise promise = ctx.newPromise(); eventLoopService.submit(() -> { ctx.write(httpResponse, promise); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index c08d571eaf6bb..1215d54e9ace1 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -614,7 +614,7 @@ public void dispatchRequest(final RestRequest request, final RestChannel channel channel.sendResponse( RestResponse.chunked(OK, ChunkedRestResponseBody.fromXContent(ignored -> Iterators.single((builder, params) -> { throw new AssertionError("should not be called for HEAD REQUEST"); - }), 
ToXContent.EMPTY_PARAMS, channel, null)) + }), ToXContent.EMPTY_PARAMS, channel), null) ); } catch (IOException e) { throw new AssertionError(e); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index 224436a388ce5..3e74a74dbd49c 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.mocksocket.MockSocket; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportSettings; @@ -51,7 +52,7 @@ public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase { @Before public void startThreadPool() { - threadPool = new ThreadPool(settings); + threadPool = new ThreadPool(settings, MeterRegistry.NOOP); NetworkService networkService = new NetworkService(Collections.emptyList()); PageCacheRecycler recycler = new MockPageCacheRecycler(Settings.EMPTY); nettyTransport = new Netty4Transport( diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java index 256a5516a2ef2..b2475216a9ce7 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java @@ -8,6 +8,9 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.SDKGlobalConfiguration; +import 
com.amazonaws.util.StringUtils; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; @@ -24,7 +27,11 @@ class AwsEc2Utils { private static final Logger logger = LogManager.getLogger(AwsEc2Utils.class); - private static final int CONNECT_TIMEOUT = 2000; + // The timeout can be configured via the AWS_METADATA_SERVICE_TIMEOUT environment variable + private static final int TIMEOUT = Optional.ofNullable(System.getenv(SDKGlobalConfiguration.AWS_METADATA_SERVICE_TIMEOUT_ENV_VAR)) + .filter(StringUtils::hasValue) + .map(s -> Integer.parseInt(s) * 1000) + .orElse(2000); private static final int METADATA_TOKEN_TTL_SECONDS = 10; static final String X_AWS_EC_2_METADATA_TOKEN = "X-aws-ec2-metadata-token"; @@ -39,7 +46,10 @@ static Optional getMetadataToken(String metadataTokenUrl) { try { urlConnection = (HttpURLConnection) new URL(metadataTokenUrl).openConnection(); urlConnection.setRequestMethod("PUT"); - urlConnection.setConnectTimeout(CONNECT_TIMEOUT); + // Use both timeout for connect and read timeout analogous to AWS SDK. 
+ // See com.amazonaws.internal.HttpURLConnection#connectToEndpoint + urlConnection.setConnectTimeout(TIMEOUT); + urlConnection.setReadTimeout(TIMEOUT); urlConnection.setRequestProperty("X-aws-ec2-metadata-token-ttl-seconds", String.valueOf(METADATA_TOKEN_TTL_SECONDS)); } catch (IOException e) { logger.warn("Unable to access the IMDSv2 URI: " + metadataTokenUrl, e); diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java index b9bea564e2720..41b848954b551 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -121,6 +121,19 @@ public void testTokenMetadataApiIsMisbehaving() throws Exception { } } + public void testTokenMetadataApiDoesNotRespond() throws Exception { + try (var metadataServer = new MetadataServer("/metadata", exchange -> { + assertNull(exchange.getRequestHeaders().getFirst("X-aws-ec2-metadata-token")); + exchange.sendResponseHeaders(200, 0); + exchange.getResponseBody().write("us-east-1c".getBytes(StandardCharsets.UTF_8)); + exchange.close(); + }, "/latest/api/token", ex -> { + // Intentionally don't close the connection, so the client has to time out + })) { + assertNodeAttributes(Settings.EMPTY, metadataServer.metadataUri(), metadataServer.tokenUri(), "us-east-1c"); + } + } + public void testTokenMetadataApiIsNotAvailable() throws Exception { try (var metadataServer = metadataServerWithoutToken()) { assertNodeAttributes(Settings.EMPTY, metadataServer.metadataUri(), metadataServer.tokenUri(), "us-east-1c"); diff --git a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java 
index 59131731d25e1..e142ba80147e0 100644 --- a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java +++ b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -28,6 +29,7 @@ public class ExampleRestHandlerPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers(final Settings settings, + final NamedWriteableRegistry namedWriteableRegistry, final RestController restController, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle index e858d43bcbc0a..b26efdf1f9a69 100644 --- a/qa/apm/build.gradle +++ b/qa/apm/build.gradle @@ -18,10 +18,6 @@ apply plugin: 'elasticsearch.internal-distribution-download' testFixtures.useFixture() -dependencies { - testImplementation project(':client:rest-high-level') -} - dockerCompose { environment.put 'STACK_VERSION', BuildParams.snapshotBuild ? VersionProperties.elasticsearch : VersionProperties.elasticsearch + "-SNAPSHOT" } diff --git a/qa/ccs-common-rest/build.gradle b/qa/ccs-common-rest/build.gradle index 8ad306144bd98..41dba06649ea1 100644 --- a/qa/ccs-common-rest/build.gradle +++ b/qa/ccs-common-rest/build.gradle @@ -40,8 +40,7 @@ tasks.named("yamlRestTest") { 'search.aggregation/220_filters_bucket/cache hits', // node_selector? 
'search.aggregation/50_filter/Standard queries get cached', 'search.aggregation/50_filter/Terms lookup gets cached', // terms lookup by "index" doesn't seem to work correctly - 'search.aggregation/70_adjacency_matrix/Terms lookup', // terms lookup by "index" doesn't seem to work correctly - 'search/350_point_in_time/point-in-time with index filter' + 'search.aggregation/70_adjacency_matrix/Terms lookup' // terms lookup by "index" doesn't seem to work correctly ].join(',') } diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java index 88740edffc09a..d91f7cf3e9a8d 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.IOUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; @@ -259,7 +260,7 @@ static ClientYamlTestCandidate rewrite(ClientYamlTestCandidate clientYamlTestCan new ClientYamlTestSection( testSection.getLocation(), testSection.getName(), - testSection.getSkipSection(), + testSection.getPrerequisiteSection(), modifiedExecutableSections ) ); @@ -313,6 +314,14 @@ protected ClientYamlTestExecutionContext createRestTestExecutionContext( public boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId) && searchTestFeatureService.clusterHasFeature(featureId); } + + @Override + public Set getAllSupportedFeatures() { + return Sets.intersection( + testFeatureService.getAllSupportedFeatures(), + 
searchTestFeatureService.getAllSupportedFeatures() + ); + } }; final Set combinedOsSet = Stream.concat(osSet.stream(), Stream.of(searchOs)).collect(Collectors.toSet()); final Set combinedNodeVersions = Stream.concat(nodesVersions.stream(), searchNodeVersions.stream()) diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java index a331d6f54cb4a..ce11112bd4416 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.IOUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; @@ -298,6 +299,14 @@ protected ClientYamlTestExecutionContext createRestTestExecutionContext( public boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId) && searchTestFeatureService.clusterHasFeature(featureId); } + + @Override + public Set getAllSupportedFeatures() { + return Sets.intersection( + testFeatureService.getAllSupportedFeatures(), + searchTestFeatureService.getAllSupportedFeatures() + ); + } }; final Set combinedOsSet = Stream.concat(osSet.stream(), Stream.of(searchOs)).collect(Collectors.toSet()); final Set combinedNodeVersions = Stream.concat(nodesVersions.stream(), searchNodeVersions.stream()) diff --git a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle index caa48db634f46..c48674831c422 100644 --- 
a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle +++ b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle @@ -15,10 +15,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.bwc-test' apply plugin: 'elasticsearch.rest-resources' -dependencies { - testImplementation project(':client:rest-high-level') -} - BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> /** diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index 3b958bf9c8b9c..40e583a641aa0 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -11,7 +11,3 @@ testClusters.matching { it.name == "javaRestTest" }.configureEach { setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' } - -dependencies { - javaRestTestImplementation project(":client:rest-high-level") -} diff --git a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 25bd24515a04b..d7ca24cd8c664 100644 --- a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -56,7 +56,6 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; -@SuppressWarnings("removal") public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase { private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); diff --git a/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java 
b/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java index 37c79fe2abb0b..4fbdfa65d40ba 100644 --- a/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java +++ b/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java @@ -8,6 +8,7 @@ package co.elastic.elasticsearch.test; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -18,12 +19,11 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestInterceptor; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.usage.UsageService; -import java.util.function.UnaryOperator; - public class CustomRestPlugin extends Plugin implements RestServerActionPlugin { private static final Logger logger = LogManager.getLogger(CustomRestPlugin.class); @@ -35,34 +35,33 @@ private static void echoHeader(String name, RestRequest request, ThreadContext t } } - public static class CustomInterceptor implements RestHandler { + public static class CustomInterceptor implements RestInterceptor { private final ThreadContext threadContext; - private final RestHandler delegate; - public CustomInterceptor(ThreadContext threadContext, RestHandler delegate) { + public CustomInterceptor(ThreadContext threadContext) { this.threadContext = threadContext; - this.delegate = delegate; } @Override - public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + public void intercept(RestRequest request, RestChannel channel, RestHandler targetHandler, ActionListener listener) + throws Exception { logger.info("intercept 
request {} {}", request.method(), request.uri()); echoHeader("x-test-interceptor", request, threadContext); - delegate.handleRequest(request, channel, client); + listener.onResponse(Boolean.TRUE); } } public static class CustomController extends RestController { public CustomController( - UnaryOperator handlerWrapper, + RestInterceptor interceptor, NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService, Tracer tracer ) { - super(handlerWrapper, client, circuitBreakerService, usageService, tracer); + super(interceptor, client, circuitBreakerService, usageService, tracer); } @Override @@ -74,19 +73,19 @@ public void dispatchRequest(RestRequest request, RestChannel channel, ThreadCont } @Override - public UnaryOperator getRestHandlerInterceptor(ThreadContext threadContext) { - return handler -> new CustomInterceptor(threadContext, handler); + public RestInterceptor getRestHandlerInterceptor(ThreadContext threadContext) { + return new CustomInterceptor(threadContext); } @Override public RestController getRestController( - UnaryOperator handlerWrapper, + RestInterceptor interceptor, NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService, Tracer tracer ) { - return new CustomController(handlerWrapper, client, circuitBreakerService, usageService, tracer); + return new CustomController(interceptor, client, circuitBreakerService, usageService, tracer); } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 7c3292f6e85c6..acee204f571ba 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -61,7 +61,6 @@ public void testGeneratedPermissions() throws Exception { } /** test generated permissions for all configured paths */ - @SuppressWarnings("deprecation") // 
needs to check settings for deprecated path @SuppressForbidden(reason = "to create FilePermission object") public void testEnvironmentPaths() throws Exception { Path path = createTempDir(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 16209a73826ca..e94638bb17791 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -14,7 +14,6 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -266,10 +265,7 @@ public void testNewReplicas() throws Exception { } public void testSearchTimeSeriesMode() throws Exception { - - var originalClusterHasNewTimeSeriesIndexing = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_2_0)) - .orElse(true); - assumeTrue("indexing time series indices changed in 8.2.0", originalClusterHasNewTimeSeriesIndexing); + assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); int numDocs; if (isRunningAgainstOldCluster()) { numDocs = createTimeSeriesModeIndex(1); @@ -307,9 +303,7 @@ public void testSearchTimeSeriesMode() throws Exception { } public void testNewReplicasTimeSeriesMode() throws Exception { - var originalClusterHasNewTimeSeriesIndexing = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_2_0)) - .orElse(true); - assumeTrue("indexing time series indices changed in 8.2.0", originalClusterHasNewTimeSeriesIndexing); + assumeTrue("indexing time series indices changed 
in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); if (isRunningAgainstOldCluster()) { createTimeSeriesModeIndex(0); } else { @@ -605,7 +599,7 @@ public void testShrinkAfterUpgrade() throws IOException { *
  • Make sure the document count is correct * */ - public void testRollover() throws IOException { + public void testRollover() throws Exception { if (isRunningAgainstOldCluster()) { client().performRequest( newXContentRequest( @@ -637,9 +631,12 @@ public void testRollover() throws IOException { ) ); - assertThat( - EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices?v")).getEntity()), - containsString("testrollover-000002") + // assertBusy to work around https://github.com/elastic/elasticsearch/issues/104371 + assertBusy( + () -> assertThat( + EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices?v&error_trace")).getEntity()), + containsString("testrollover-000002") + ) ); } @@ -1213,9 +1210,7 @@ public void testClosedIndices() throws Exception { } @UpdateForV9 // This check can be removed (always assume true) - var originalClusterSupportsReplicationOfClosedIndices = parseLegacyVersion(getOldClusterVersion()).map( - v -> v.onOrAfter(Version.V_7_2_0) - ).orElse(true); + var originalClusterSupportsReplicationOfClosedIndices = oldClusterHasFeature(RestTestLegacyFeatures.REPLICATION_OF_CLOSED_INDICES); if (originalClusterSupportsReplicationOfClosedIndices) { ensureGreenLongWait(index); @@ -1621,9 +1616,7 @@ public void testResize() throws Exception { public void testSystemIndexMetadataIsUpgraded() throws Exception { @UpdateForV9 // assumeTrue can be removed (condition always true) - var originalClusterTaskIndexIsSystemIndex = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_10_0)) - .orElse(true); - + var originalClusterTaskIndexIsSystemIndex = oldClusterHasFeature(RestTestLegacyFeatures.TASK_INDEX_SYSTEM_INDEX); assumeTrue(".tasks became a system index in 7.10.0", originalClusterTaskIndexIsSystemIndex); final String systemIndexWarning = "this request accesses system indices: [.tasks], but in a future major version, direct " + "access to system indices will be prevented by default"; @@ 
-1745,8 +1738,7 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception { */ @UpdateForV9 // This test can be removed in v9 public void testEnableSoftDeletesOnRestore() throws Exception { - var originalClusterDidNotEnforceSoftDeletes = parseLegacyVersion(getOldClusterVersion()).map(v -> v.before(Version.V_8_0_0)) - .orElse(false); + var originalClusterDidNotEnforceSoftDeletes = oldClusterHasFeature(RestTestLegacyFeatures.SOFT_DELETES_ENFORCED) == false; assumeTrue("soft deletes must be enabled on 8.0+", originalClusterDidNotEnforceSoftDeletes); final String snapshot = "snapshot-" + index; @@ -1859,9 +1851,8 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception { */ @UpdateForV9 public void testTransportCompressionSetting() throws IOException { - var originalClusterCompressSettingIsBoolean = parseLegacyVersion(getOldClusterVersion()).map(v -> v.before(Version.V_7_14_0)) - .orElse(false); - assumeTrue("the old transport.compress setting existed before 7.14", originalClusterCompressSettingIsBoolean); + var originalClusterBooleanCompressSetting = oldClusterHasFeature(RestTestLegacyFeatures.NEW_TRANSPORT_COMPRESSED_SETTING) == false; + assumeTrue("the old transport.compress setting existed before 7.14", originalClusterBooleanCompressSetting); if (isRunningAgainstOldCluster()) { client().performRequest( newXContentRequest( diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java index 05a2892717697..eff9dfc554133 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.client.Request; import 
org.elasticsearch.client.Response; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -26,6 +27,7 @@ import java.util.Arrays; import java.util.Locale; import java.util.Map; +import java.util.Set; import static org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus.OLD; import static org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus.UPGRADED; @@ -39,6 +41,8 @@ public abstract class ParameterizedFullClusterRestartTestCase extends ESRestTest private static IndexVersion oldIndexVersion; private static boolean upgradeFailed = false; private static boolean upgraded = false; + + private static Set oldClusterFeatures; private final FullClusterRestartUpgradeStatus requestedUpgradeStatus; public ParameterizedFullClusterRestartTestCase(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { @@ -50,6 +54,15 @@ public static Iterable parameters() throws Exception { return Arrays.stream(FullClusterRestartUpgradeStatus.values()).map(v -> new Object[] { v }).toList(); } + @Before + public void extractOldClusterFeatures() { + if (upgraded == false && oldClusterFeatures == null) { + assert testFeatureServiceInitialized() + : "Old cluster features can be extracted only after testFeatureService has been initialized. 
See ESRestTestCase#initClient"; + oldClusterFeatures = Set.copyOf(testFeatureService.getAllSupportedFeatures()); + } + } + @Before public void extractOldIndexVersion() throws Exception { if (upgraded == false) { @@ -111,6 +124,7 @@ public void maybeUpgrade() throws Exception { public static void resetUpgrade() { upgraded = false; upgradeFailed = false; + oldClusterFeatures = null; } public boolean isRunningAgainstOldCluster() { @@ -121,6 +135,15 @@ public static String getOldClusterVersion() { return OLD_CLUSTER_VERSION; } + protected static boolean oldClusterHasFeature(String featureId) { + assert oldClusterFeatures != null : "Old cluster features cannot be accessed before initialization is completed"; + return oldClusterFeatures.contains(featureId); + } + + protected static boolean oldClusterHasFeature(NodeFeature feature) { + return oldClusterHasFeature(feature.id()); + } + public static IndexVersion getOldClusterIndexVersion() { assert oldIndexVersion != null; return oldIndexVersion; diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index b2df6b0fa01a3..66c2a5c472ac2 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -42,6 +42,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.ClassRule; @@ -53,7 +54,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static 
org.elasticsearch.xcontent.XContentFactory.jsonBuilder; /** @@ -255,10 +255,7 @@ public void testQueryBuilderBWC() throws Exception { ) { @UpdateForV9 // condition will always be true - var originalClusterHasTransportVersion = parseLegacyVersion(getOldClusterVersion()).map( - v -> v.onOrAfter(VERSION_INTRODUCING_TRANSPORT_VERSIONS) - ).orElse(true); - + var originalClusterHasTransportVersion = oldClusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED); final TransportVersion transportVersion; if (originalClusterHasTransportVersion == false) { transportVersion = TransportVersion.fromId( diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 1a726daa831b9..23c46c5804a6e 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -23,7 +23,8 @@ apply plugin: 'elasticsearch.bwc-test' apply plugin: 'elasticsearch.rest-resources' dependencies { - testImplementation project(":client:rest-high-level") + testImplementation project(':modules:aggregations') + testImplementation project(':modules:parent-join') } def ccsSupportedVersion = bwcVersion -> { diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java index 9c5415f1d5ea9..b1d300134120b 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java @@ -17,8 +17,8 @@ import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.tests.util.TimeUnits; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.aggregations.pipeline.DerivativePipelineAggregationBuilder; import 
org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -112,7 +112,6 @@ * such parameter, hence we want to verify that results are the same in both scenarios. */ @TimeoutSuite(millis = 5 * TimeUnits.MINUTE) // to account for slow as hell VMs -@SuppressWarnings("removal") public class CCSDuelIT extends ESRestTestCase { private static final String INDEX_NAME = "ccs_duel_index"; @@ -199,7 +198,7 @@ private void indexDocuments(String idPrefix) throws IOException, InterruptedExce assertTrue(latch.await(30, TimeUnit.SECONDS)); - RefreshResponse refreshResponse = refresh(INDEX_NAME); + BroadcastResponse refreshResponse = refresh(INDEX_NAME); ElasticsearchAssertions.assertNoFailures(refreshResponse); } diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index df03b37c5a603..0475b7e0eeb80 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -17,10 +17,6 @@ apply plugin: 'elasticsearch.internal-distribution-download' testFixtures.useFixture() -dependencies { - testImplementation project(':client:rest-high-level') -} - tasks.register("copyNodeKeyMaterial", Sync) { from project(':x-pack:plugin:core') .files( diff --git a/qa/repository-multi-version/build.gradle b/qa/repository-multi-version/build.gradle index 8398e3b8aeb1a..c6c5d7212357f 100644 --- a/qa/repository-multi-version/build.gradle +++ b/qa/repository-multi-version/build.gradle @@ -15,10 +15,6 @@ apply plugin: 'elasticsearch.internal-test-artifact' apply plugin: 'elasticsearch.bwc-test' -dependencies { - testImplementation project(':client:rest-high-level') -} - BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> String oldClusterName = "${baseName}-old" String newClusterName = "${baseName}-new" diff --git a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java 
index c020cc118ca78..93dcd5a12d43d 100644 --- a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java +++ b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java @@ -45,7 +45,6 @@ *
  • Run against the current version cluster from the second step: {@link TestStep#STEP4_NEW_CLUSTER}
  • * */ -@SuppressWarnings("removal") public class MultiVersionRepositoryAccessIT extends ESRestTestCase { private enum TestStep { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java index 2d8ff8b747323..0487b282179a9 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ -13,7 +13,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.features.FeatureService; -import org.junit.BeforeClass; +import org.junit.Before; import java.io.IOException; import java.util.List; @@ -26,11 +26,11 @@ public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { - @BeforeClass - public static void checkMigrationVersion() { - assumeTrue( + @Before + public void checkMigrationVersion() { + assumeFalse( "This checks migrations from before cluster features were introduced", - getOldClusterVersion().before(FeatureService.CLUSTER_FEATURES_ADDED_VERSION) + oldClusterHasFeature(FeatureService.FEATURES_SUPPORTED) ); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 230ab39610b1e..73d91ac41fcb7 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -10,17 +10,17 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import 
org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodeWithStatus; -import org.elasticsearch.cluster.metadata.MetadataFeatures; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; @@ -48,13 +48,11 @@ private enum ProcessorsPrecision { } public void testUpgradeDesiredNodes() throws Exception { - assumeTrue("Desired nodes was introduced in 8.1", getOldClusterVersion().onOrAfter(Version.V_8_1_0)); + assumeTrue("Desired nodes was introduced in 8.1", oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_NODE_API_SUPPORTED)); - var featureVersions = new MetadataFeatures().getHistoricalFeatures(); - - if (getOldClusterVersion().onOrAfter(featureVersions.get(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED))) { + if (oldClusterHasFeature(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED)) { assertUpgradedNodesCanReadDesiredNodes(); - } else if (getOldClusterVersion().onOrAfter(featureVersions.get(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED))) { + } else if (oldClusterHasFeature(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED)) { assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent(); } else { assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions(); @@ -84,12 +82,12 @@ private void assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent() throws 1238.49922909, ByteSizeValue.ofGb(32), ByteSizeValue.ofGb(128), - Version.CURRENT + clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? 
null : Build.current().version() ) ) .toList(); - if (isMixedCluster() || isUpgradedCluster()) { + if (isMixedCluster()) { updateDesiredNodes(desiredNodes, desiredNodesVersion - 1); } for (int i = 0; i < 2; i++) { @@ -155,7 +153,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve processorsPrecision == ProcessorsPrecision.DOUBLE ? randomDoubleProcessorCount() : 0.5f, ByteSizeValue.ofGb(randomIntBetween(10, 24)), ByteSizeValue.ofGb(randomIntBetween(128, 256)), - Version.CURRENT + clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() ) ) .toList(); @@ -169,7 +167,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), ByteSizeValue.ofGb(randomIntBetween(10, 24)), ByteSizeValue.ofGb(randomIntBetween(128, 256)), - Version.CURRENT + clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() ); }).toList(); } @@ -184,7 +182,7 @@ private void addClusterNodesToDesiredNodesWithIntegerProcessors(int version) thr randomIntBetween(1, 24), ByteSizeValue.ofGb(randomIntBetween(10, 24)), ByteSizeValue.ofGb(randomIntBetween(128, 256)), - Version.CURRENT + clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? 
null : Build.current().version() ) ) .toList(); @@ -198,6 +196,11 @@ private void updateDesiredNodes(List nodes, int version) throws IOE builder.xContentList(UpdateDesiredNodesRequest.NODES_FIELD.getPreferredName(), nodes); builder.endObject(); request.setJsonEntity(Strings.toString(builder)); + request.setOptions( + expectVersionSpecificWarnings( + v -> v.compatible("[version removal] Specifying node_version in desired nodes requests is deprecated.") + ) + ); final var response = client().performRequest(request); final var statusCode = response.getStatusLine().getStatusCode(); assertThat(statusCode, equalTo(200)); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java index afe3c1c5d22f1..47be0e5efff62 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java @@ -100,7 +100,14 @@ public void testGetFeatureUpgradeStatus() throws Exception { assertThat(feature, aMapWithSize(4)); assertThat(feature.get("minimum_index_version"), equalTo(getOldClusterIndexVersion().toString())); - if (getOldClusterVersion().before(TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_VERSION)) { + + // Feature migration happens only across major versions; also, we usually begin to require migrations once we start testing + // for the next major version upgrade (see e.g. #93666). Trying to express this with features may be problematic, so we + // want to keep using versions here. We also assume that for non-semantic version migrations are not required. 
+ boolean migrationNeeded = parseLegacyVersion(getOldClusterVersion()).map( + v -> v.before(TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_VERSION) + ).orElse(false); + if (migrationNeeded) { assertThat(feature.get("migration_status"), equalTo("MIGRATION_NEEDED")); } else { assertThat(feature.get("migration_status"), equalTo("NO_MIGRATION_NEEDED")); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index d5b5e24e2ccde..273196f392064 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.test.ListMatcher; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -229,7 +230,7 @@ private void bulk(String index, String valueSuffix, int count) throws IOExceptio } public void testTsdb() throws IOException { - assumeTrue("indexing time series indices changed in 8.2.0", getOldClusterVersion().onOrAfter(Version.V_8_2_0)); + assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); StringBuilder bulk = new StringBuilder(); if (isOldCluster()) { @@ -337,7 +338,7 @@ private void assertTsdbAgg(Matcher... 
expected) throws IOException { } public void testSyntheticSource() throws IOException { - assumeTrue("added in 8.4.0", getOldClusterVersion().onOrAfter(Version.V_8_4_0)); + assumeTrue("added in 8.4.0", oldClusterHasFeature(RestTestLegacyFeatures.SYNTHETIC_SOURCE_SUPPORTED)); if (isOldCluster()) { Request createIndex = new Request("PUT", "/synthetic"); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 5a2c4c783ec85..1981b3e32cc34 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -15,7 +15,9 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -69,6 +71,7 @@ public static Iterable parameters() { } private static final Set upgradedNodes = new HashSet<>(); + private static final Set oldClusterFeatures = new HashSet<>(); private static boolean upgradeFailed = false; private static IndexVersion oldIndexVersion; @@ -78,6 +81,13 @@ protected ParameterizedRollingUpgradeTestCase(@Name("upgradedNodes") int upgrade this.requestedUpgradedNodes = upgradedNodes; } + @Before + public void extractOldClusterFeatures() { + if (isOldCluster() && oldClusterFeatures.isEmpty()) { + oldClusterFeatures.addAll(testFeatureService.getAllSupportedFeatures()); + } + } + @Before public void 
extractOldIndexVersion() throws Exception { if (oldIndexVersion == null && upgradedNodes.isEmpty()) { @@ -90,12 +100,13 @@ public void extractOldIndexVersion() throws Exception { Map nodeMap = objectPath.evaluate("nodes"); for (String id : nodeMap.keySet()) { Number ix = objectPath.evaluate("nodes." + id + ".index_version"); - IndexVersion version; + final IndexVersion version; if (ix != null) { version = IndexVersion.fromId(ix.intValue()); } else { // it doesn't have index version (pre 8.11) - just infer it from the release version - version = IndexVersion.fromId(getOldClusterVersion().id); + version = parseLegacyVersion(getOldClusterVersion()).map(v -> IndexVersion.fromId(v.id)) + .orElse(IndexVersions.MINIMUM_COMPATIBLE); } if (indexVersion == null) { @@ -138,11 +149,22 @@ public void upgradeNode() throws Exception { public static void resetNodes() { oldIndexVersion = null; upgradedNodes.clear(); + oldClusterFeatures.clear(); upgradeFailed = false; } - protected static org.elasticsearch.Version getOldClusterVersion() { - return org.elasticsearch.Version.fromString(OLD_CLUSTER_VERSION); + @Deprecated // Use the new testing framework and oldClusterHasFeature(feature) instead + protected static String getOldClusterVersion() { + return OLD_CLUSTER_VERSION; + } + + protected static boolean oldClusterHasFeature(String featureId) { + assert oldClusterFeatures.isEmpty() == false; + return oldClusterFeatures.contains(featureId); + } + + protected static boolean oldClusterHasFeature(NodeFeature feature) { + return oldClusterHasFeature(feature.id()); } protected static IndexVersion getOldClusterIndexVersion() { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index 4b765849e6ea9..ef80643c82c0d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -13,7 +13,6 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.util.EntityUtils; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -24,6 +23,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -49,13 +49,10 @@ public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { } public void testSnapshotBasedRecovery() throws Exception { - - assumeFalse( - "Cancel shard allocation command is broken for initial desired balance versions and might allocate shard " - + "on the node where it is not supposed to be. 
Fixed by https://github.com/elastic/elasticsearch/pull/93635", - getOldClusterVersion() == Version.V_8_6_0 - || getOldClusterVersion() == Version.V_8_6_1 - || getOldClusterVersion() == Version.V_8_7_0 + assumeTrue( + "Cancel shard allocation command is broken for initial versions of the desired_balance allocator", + oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_SUPPORTED) == false + || oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_FIXED) ); final String indexName = "snapshot_based_recovery"; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index b42646164b335..3ce0fc79087c2 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -10,11 +10,11 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.time.Instant; @@ -130,10 +130,7 @@ public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { """; public void testTsdbDataStream() throws Exception { - assumeTrue( - "Skipping version [" + getOldClusterVersion() + "], because TSDB was GA-ed in 8.7.0", - getOldClusterVersion().onOrAfter(Version.V_8_7_0) - ); + assumeTrue("TSDB was GA-ed in 8.7.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE)); String dataStreamName = "k8s"; if (isOldCluster()) { final String INDEX_TEMPLATE = """ @@ -159,8 +156,9 @@ public void testTsdbDataStream() throws Exception { public void testTsdbDataStreamWithComponentTemplate() throws Exception { 
assumeTrue( - "Skipping version [" + getOldClusterVersion() + "], because TSDB was GA-ed in 8.7.0 and bug was fixed in 8.11.0", - getOldClusterVersion().onOrAfter(Version.V_8_7_0) && getOldClusterVersion().before(Version.V_8_11_0) + "TSDB was GA-ed in 8.7.0 and bug was fixed in 8.11.0", + oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE) + && (oldClusterHasFeature(RestTestLegacyFeatures.TSDB_EMPTY_TEMPLATE_FIXED) == false) ); String dataStreamName = "template-with-component-template"; if (isOldCluster()) { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 9647bfb739164..3af344051030b 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -10,13 +10,13 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.util.Map; @@ -42,10 +42,7 @@ public void testOldIndexSettings() throws Exception { Request createTestIndex = new Request("PUT", "/" + INDEX_NAME); createTestIndex.setJsonEntity("{\"settings\": {\"index.indexing.slowlog.level\": \"WARN\"}}"); createTestIndex.setOptions(expectWarnings(EXPECTED_WARNING)); - if (getOldClusterVersion().before(Version.V_8_0_0)) { - // create index with settings no longer valid in 8.0 - client().performRequest(createTestIndex); - } else { + if 
(oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED)) { assertTrue( expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage() .contains("unknown setting [index.indexing.slowlog.level]") @@ -53,12 +50,15 @@ public void testOldIndexSettings() throws Exception { Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME); client().performRequest(createTestIndex1); + } else { + // create index with settings no longer valid in 8.0 + client().performRequest(createTestIndex); } // add some data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (getOldClusterVersion().before(Version.V_8_0_0)) { + if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { bulk.setOptions(expectWarnings(EXPECTED_WARNING)); } bulk.setJsonEntity(Strings.format(""" @@ -70,7 +70,7 @@ public void testOldIndexSettings() throws Exception { // add some more data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (getOldClusterVersion().before(Version.V_8_0_0)) { + if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { bulk.setOptions(expectWarnings(EXPECTED_WARNING)); } bulk.setJsonEntity(Strings.format(""" @@ -79,7 +79,7 @@ public void testOldIndexSettings() throws Exception { """, INDEX_NAME)); client().performRequest(bulk); } else { - if (getOldClusterVersion().before(Version.V_8_0_0)) { + if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { Request createTestIndex = new Request("PUT", "/" + INDEX_NAME + "/_settings"); // update index settings should work createTestIndex.setJsonEntity("{\"index.indexing.slowlog.level\": \"INFO\"}"); @@ -117,7 +117,7 @@ private void assertCount(String index, int countAtLeast) throws IOException { public static void updateIndexSettingsPermittingSlowlogDeprecationWarning(String index, 
Settings.Builder settings) throws IOException { Request request = new Request("PUT", "/" + index + "/_settings"); request.setJsonEntity(org.elasticsearch.common.Strings.toString(settings.build())); - if (getOldClusterVersion().before(Version.V_7_17_9)) { + if (oldClusterHasFeature(RestTestLegacyFeatures.DEPRECATION_WARNINGS_LEAK_FIXED) == false) { // There is a bug (fixed in 7.17.9 and 8.7.0 where deprecation warnings could leak into ClusterApplierService#applyChanges) // Below warnings are set (and leaking) from an index in this test case request.setOptions(expectVersionSpecificWarnings(v -> { diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java index 7eb0a38ad8099..081135d6b1e17 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -153,6 +154,7 @@ public static class SystemIndexTestPlugin extends Plugin implements SystemIndexP @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java index 
6a1f598c5e529..5c01d0fd430b4 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/TestResponseHeaderPlugin.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -28,6 +29,7 @@ public class TestResponseHeaderPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java index e6325d8bad6f6..971d1bad3e976 100644 --- a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java +++ b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -127,6 +128,7 @@ private static XContentBuilder mappings() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings 
clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json index 79751e4b0f61b..9ced5d3e8c454 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json @@ -80,6 +80,10 @@ "type": "boolean", "description": "Sets require_alias for all incoming documents. Defaults to unset (false)" }, + "require_data_stream": { + "type": "boolean", + "description": "When true, requires the destination to be a data stream (existing or to-be-created). Default is false" + }, "list_executed_pipelines": { "type": "boolean", "description": "Sets list_executed_pipelines for all incoming documents. Defaults to unset (false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_native.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_native.json new file mode 100644 index 0000000000000..5a13b44e8202d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_native.json @@ -0,0 +1,38 @@ +{ + "connector.update_native": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/connector-apis.html", + "description": "Updates the is_native flag of the connector." + }, + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_native", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "An object containing the connector's is_native flag", + "required": true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_service_type.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_service_type.json new file mode 100644 index 0000000000000..779fff1750276 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_service_type.json @@ -0,0 +1,38 @@ +{ + "connector.update_service_type": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/connector-apis.html", + "description": "Updates the service type of the connector." + }, + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_service_type", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + }, + "body": { + "description": "An object containing the connector's service type.", + "required": true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json new file mode 100644 index 0000000000000..511e925a12e1d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json @@ -0,0 +1,28 @@ +{ + "connector_secret.delete": { + "documentation": { + "url": null, + "description": "Deletes a connector secret." 
+ }, + "stability": "experimental", + "visibility":"private", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_connector/_secret/{id}", + "methods":[ "DELETE" ], + "parts":{ + "id":{ + "type":"string", + "description":"The ID of the secret" + } + } + } + ] + }, + "params":{} + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.get.json new file mode 100644 index 0000000000000..f1037bedddfc6 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.get.json @@ -0,0 +1,28 @@ +{ + "connector_secret.get": { + "documentation": { + "url": null, + "description": "Retrieves a secret stored by Connectors." + }, + "stability": "experimental", + "visibility":"private", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_connector/_secret/{id}", + "methods":[ "GET" ], + "parts":{ + "id":{ + "type":"string", + "description":"The ID of the secret" + } + } + } + ] + }, + "params":{} + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.post.json new file mode 100644 index 0000000000000..48657cf389446 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.post.json @@ -0,0 +1,26 @@ +{ + "connector_secret.post": { + "documentation": { + "url": null, + "description": "Creates a secret for a Connector." 
+ }, + "stability": "experimental", + "visibility":"private", + "headers":{ + "accept": [ "application/json" ] + }, + "url":{ + "paths":[ + { + "path":"/_connector/_secret", + "methods":[ "POST" ] + } + ] + }, + "params":{}, + "body": { + "description":"The secret value to store", + "required":true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json new file mode 100644 index 0000000000000..85a2a46c8335d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json @@ -0,0 +1,44 @@ +{ + "esql.async_query":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-api.html", + "description":"Executes an ESQL request asynchronously" + }, + "stability":"experimental", + "visibility":"public", + "headers":{ + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_query/async", + "methods":[ + "POST" + ] + } + ] + }, + "params":{ + "format":{ + "type":"string", + "description":"a short version of the Accept header, e.g. json, yaml" + }, + "delimiter":{ + "type":"string", + "description":"The character to use between values within a CSV row. Only valid for the csv format.", + "default":false + }, + "drop_null_columns": { + "type": "boolean", + "description": "Should entirely null columns be removed from the results? Their name and type will be returning in a new `all_columns` section.", + "default": false + } + }, + "body":{ + "description":"Use the `query` element to start a query. 
Use `columnar` to format the answer.", + "required":true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json new file mode 100644 index 0000000000000..c4670758f7fe9 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json @@ -0,0 +1,44 @@ +{ + "esql.async_query_get":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-get-api.html", + "description": "Retrieves the results of a previously submitted async query request given its ID." + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_query/async/{id}", + "methods":[ + "GET" + ], + "parts":{ + "id":{ + "type":"string", + "description":"The async query ID" + } + } + } + ] + }, + "params":{ + "wait_for_completion_timeout":{ + "type":"time", + "description":"Specify the time that the request should block waiting for the final response" + }, + "keep_alive": { + "type": "time", + "description": "Specify the time interval in which the results (partial or final) for this search will be available" + }, + "drop_null_columns": { + "type": "boolean", + "description": "Should entirely null columns be removed from the results? Their name and type will be returning in a new `all_columns` section.", + "default": false + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json index c038ac4f3b749..573fde5d9a9cd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json @@ -29,10 +29,15 @@ "type":"string", "description":"The character to use between values within a CSV row. 
Only valid for the csv format.", "default":false + }, + "drop_null_columns": { + "type": "boolean", + "description": "Should entirely null columns be removed from the results? Their name and type will be returning in a new `all_columns` section.", + "default": false } }, "body":{ - "description":"Use the `query` element to start a query. Use `time_zone` to specify an execution time zone and `columnar` to format the answer.", + "description":"Use the `query` element to start a query. Use `columnar` to format the answer.", "required":true } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json index bd94d653014a0..102ca4e012e85 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json @@ -101,6 +101,10 @@ "require_alias": { "type": "boolean", "description": "When true, requires destination to be an alias. Default is false" + }, + "require_data_stream": { + "type": "boolean", + "description": "When true, requires the destination to be a data stream (existing or to-be-created). Default is false" } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json new file mode 100644 index 0000000000000..e0361d30b5e73 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json @@ -0,0 +1,32 @@ +{ + "text_structure.test_grok_pattern": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/test-grok-pattern-api.html", + "description": "Tests a Grok pattern on some text." 
+ }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url": { + "paths": [ + { + "path": "/_text_structure/test_grok_pattern", + "methods": ["GET", "POST"] + } + ] + }, + "params": { + "ecs_compatibility": { + "type": "string", + "description": "Optional parameter to specify the compatibility mode with ECS Grok patterns - may be either 'v1' or 'disabled'" + } + }, + "body": { + "description": "The Grok pattern and text.", + "required": true + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml index 500207e969146..d27abc3da7081 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml @@ -118,7 +118,7 @@ "Add data stream lifecycle": - skip: version: " - 8.10.99" - reason: "Data stream lifecycle was GA in 8.11" + reason: "Data stream lifecycle was available from 8.11" - do: cluster.put_component_template: @@ -146,7 +146,7 @@ "Get data stream lifecycle with default rollover": - skip: version: " - 8.10.99" - reason: "Data stream lifecycle was GA in 8.11" + reason: "Data stream lifecycle was available from 8.11" - do: cluster.put_component_template: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml index 28b79597cb2da..7e08f2dbe5423 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml @@ -1,8 +1,8 @@ --- setup: - skip: - version: " - 8.2.99" 
- reason: "API added in in 8.1.0 but modified in 8.3" + version: " - 8.12.99" + reason: "API added in in 8.1.0 but modified in 8.13 (node_version field removed)" --- teardown: - do: @@ -22,6 +22,57 @@ teardown: nodes.info: {} - set: { nodes.$master.version: es_version } + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb" } + - match: { replaced_existing_history_id: false } + + - do: + _internal.get_desired_nodes: {} + - match: + $body: + history_id: "test" + version: 1 + nodes: + - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } + + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 2 + body: + nodes: + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb" } + - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb" } + - match: { replaced_existing_history_id: false } + + - do: + _internal.get_desired_nodes: {} + + - match: { history_id: "test" } + - match: { version: 2 } + - length: { nodes: 2 } + - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } } + - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb" } } +--- +"Test update desired nodes with node_version generates a warning": + - skip: + reason: "contains is a newly added assertion" + features: ["contains", "allowed_warnings"] + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + - do: _internal.update_desired_nodes: history_id: "test" @@ -29,6 +80,8 @@ teardown: body: nodes: - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", 
storage: "128gb", node_version: $es_version } + allowed_warnings: + - "[version removal] Specifying node_version in desired nodes requests is deprecated." - match: { replaced_existing_history_id: false } - do: @@ -48,6 +101,8 @@ teardown: nodes: - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } + allowed_warnings: + - "[version removal] Specifying node_version in desired nodes requests is deprecated." - match: { replaced_existing_history_id: false } - do: @@ -78,7 +133,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - do: @@ -88,7 +143,7 @@ teardown: history_id: "test" version: 1 nodes: - - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } - do: _internal.update_desired_nodes: @@ -96,8 +151,8 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.external_id": "instance-000188" }, processors: 16, memory: "128gb", storage: "1tb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8, memory: "64gb", storage: "128gb" } + - { settings: { "node.external_id": "instance-000188" }, processors: 16, memory: "128gb", storage: "1tb" } - match: { replaced_existing_history_id: true } - do: @@ -105,8 +160,8 @@ 
teardown: - match: { history_id: "new_history" } - match: { version: 1 } - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } } + - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } + - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb" } } --- "Test delete desired nodes": - do: @@ -118,6 +173,44 @@ teardown: nodes.info: {} - set: { nodes.$master.version: es_version } + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } + - match: { replaced_existing_history_id: false } + + - do: + _internal.get_desired_nodes: {} + - match: + $body: + history_id: "test" + version: 1 + nodes: + - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } + + - do: + _internal.delete_desired_nodes: {} + + - do: + catch: missing + _internal.get_desired_nodes: {} + - match: { status: 404 } +--- +"Test delete desired nodes with node_version generates a warning": + - skip: + features: allowed_warnings + - do: + cluster.state: {} + + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + - do: _internal.update_desired_nodes: history_id: "test" @@ -125,6 +218,8 @@ teardown: body: nodes: - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + allowed_warnings: + - "[version removal] Specifying node_version in desired 
nodes requests is deprecated." - match: { replaced_existing_history_id: false } - do: @@ -163,8 +258,8 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } + - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - do: @@ -173,8 +268,8 @@ teardown: - match: { history_id: "test" } - match: { version: 1 } - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } + - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } + - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } - do: _internal.update_desired_nodes: @@ -182,8 +277,8 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } + - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { 
replaced_existing_history_id: false } @@ -193,8 +288,8 @@ teardown: - match: { history_id: "test" } - match: { version: 1 } - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } + - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } + - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } --- "Test update desired nodes is idempotent with different order": - skip: @@ -215,8 +310,8 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } + - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - do: @@ -225,8 +320,8 @@ teardown: - match: { history_id: "test" } - match: { version: 1 } - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } + - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } + - 
contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } - do: _internal.update_desired_nodes: @@ -234,8 +329,8 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000188" }, processors: 8.0, memory: "64gb", storage: "128gb" } + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } @@ -245,8 +340,8 @@ teardown: - match: { history_id: "test" } - match: { version: 1 } - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } + - contains: { nodes: { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } + - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 8.0, memory: "64gb", storage: "128gb" } } --- "Test going backwards within the same history is forbidden": - do: @@ -264,7 +359,7 @@ teardown: version: 2 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - do: @@ -274,7 +369,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": 
"instance-000187", "http.tcp.keep_idle": 100 }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187", "http.tcp.keep_idle": 100 }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { status: 409 } - match: { error.type: version_conflict_exception } - match: { error.reason: "version [1] has been superseded by version [2] for history [test]" } @@ -286,7 +381,7 @@ teardown: history_id: "test" version: 2 nodes: - - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } --- "Test using the same version with different definition is forbidden": - do: @@ -304,7 +399,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - do: @@ -314,7 +409,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 64.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 64.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: illegal_argument_exception } - match: { error.reason: "Desired nodes with history [test] and version [1] already exists with a different definition" } @@ -326,94 +421,7 @@ teardown: history_id: "test" version: 1 nodes: - - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } ---- -"Test settings are validated": - - skip: - version: "8.9.99 - " - reason: "We 
started skipping setting validations in 8.10" - - do: - cluster.state: {} - - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - catch: bad_request - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187", "http.tcp.keep_idle": -1000 }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - match: { status: 400 } - - match: { error.type: illegal_argument_exception } - - match: { error.reason: "Nodes with ids [instance-000187] in positions [0] contain invalid settings" } - - match: { error.suppressed.0.reason: "Failed to parse value [-1000] for setting [http.tcp.keep_idle] must be >= -1" } ---- -"Test unknown settings are forbidden in known versions": - - skip: - version: "8.9.99 - " - reason: "We started skipping setting validations in 8.10" - - do: - cluster.state: {} - - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - catch: bad_request - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187", "unknown_setting": -1000 }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - match: { status: 400 } - - match: { error.type: illegal_argument_exception } - - match: { error.reason: "Nodes with ids [instance-000187] in positions [0] contain invalid settings" } - - match: { error.suppressed.0.reason: "unknown setting [unknown_setting] please check that any required plugins are installed, or check the breaking changes documentation for removed settings" } ---- -"Test unknown settings are allowed in future versions": - - skip: - version: "8.9.99 - " - reason: "We started skipping setting validations in 8.10" - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { 
"node.external_id": "instance-000187", "unknown_setting": -1000 }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: "99.1.0" } - - match: { replaced_existing_history_id: false } ---- -"Test some settings can be overridden": - - skip: - version: "8.9.99 - " - reason: "We started skipping setting validations in 8.10" - - do: - cluster.state: {} - - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187", node.processors: 2048 }, processors: 2048, memory: "64gb", storage: "128gb", node_version: $es_version } - - match: { replaced_existing_history_id: false } + - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb" } --- "Test external_id or node.name is required": - do: @@ -432,7 +440,7 @@ teardown: version: 1 body: nodes: - - { settings: { }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } - match: { error.caused_by.caused_by.reason: "[node.name] or [node.external_id] is missing or empty" } @@ -454,7 +462,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": " " }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": " " }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } - match: { error.caused_by.caused_by.reason: "[node.name] or [node.external_id] is missing or empty" } @@ -476,8 +484,8 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - 
- { settings: { "node.external_id": "instance-000187"}, processors: 16.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb" } + - { settings: { "node.external_id": "instance-000187"}, processors: 16.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: illegal_argument_exception } - match: { error.reason: "Some nodes contain the same setting value [instance-000187] for [node.external_id]" } @@ -499,7 +507,7 @@ teardown: version: "asa" body: nodes: - - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: illegal_argument_exception } - match: { error.reason: "Failed to parse long parameter [version] with value [asa]" } @@ -521,26 +529,11 @@ teardown: version: -1 body: nodes: - - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: action_request_validation_exception } - match: { error.reason: "Validation Failed: 1: version must be positive;" } --- -"Test node version must be at least the current master version": - - do: - catch: bad_request - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb", node_version: "7.16.0" } - - match: { status: 400 } - - match: { error.type: illegal_argument_exception } - - match: { error.reason: "Nodes with ids [instance-000187] in positions [0] contain invalid settings" } - - match: - 
error.suppressed.0.reason: "/Illegal\\snode\\sversion.+$/" ---- "Test history_id must be present": - do: cluster.state: {} @@ -558,7 +551,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: action_request_validation_exception } - match: { error.reason: "Validation Failed: 1: historyID should not be empty;" } @@ -592,7 +585,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.external_id": "instance-000187", "node.roles": "data_hot" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000187", "node.roles": "data_hot" }, processors: 8.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: action_request_validation_exception } - match: { error.reason: "Validation Failed: 1: nodes must contain at least one master node;" } @@ -614,7 +607,7 @@ teardown: version: 1 body: nodes: - - { processors: 64.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { processors: 64.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -635,7 +628,7 @@ teardown: version: 1 body: nodes: - - { settings: null, processors: 64.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: null, processors: 64.0, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -656,7 +649,7 @@ teardown: version: 1 body: nodes: - - { settings: {}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: {}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ 
-677,7 +670,7 @@ teardown: version: 1 body: nodes: - - { settings: {}, processors: 8.0, storage: "128gb", node_version: $es_version } + - { settings: {}, processors: 8.0, storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -698,7 +691,7 @@ teardown: version: 1 body: nodes: - - { settings: {}, processors: 8.0, memory: null, storage: "128gb", node_version: $es_version } + - { settings: {}, processors: 8.0, memory: null, storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -719,7 +712,7 @@ teardown: version: 1 body: nodes: - - { settings: {}, processors: 8.0, memory: "64gb", node_version: $es_version } + - { settings: {}, processors: 8.0, memory: "64gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -740,43 +733,7 @@ teardown: version: 1 body: nodes: - - { settings: {}, processors: 8, memory: "64gb", storage: null, node_version: $es_version } - - match: { status: 400 } - - match: { error.type: x_content_parse_exception } ---- -"Test node version is required": - - do: - catch: bad_request - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: {}, processors: 8, memory: "64gb", storage: "128gb" } - - match: { status: 400 } - - match: { error.type: x_content_parse_exception } ---- -"Test node version must have content": - - do: - catch: bad_request - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187"}, processors: 64, memory: "1b", storage: "1b", node_version: " " } - - match: { status: 400 } - - match: { error.type: x_content_parse_exception } ---- -"Test node version can not be null": - - do: - catch: bad_request - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187"}, processors: 64, memory: "1b", storage: "1b", 
node_version: null } + - { settings: {}, processors: 8, memory: "64gb", storage: null } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -797,7 +754,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 16.0, max: 20.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: 16.0, max: 20.0}, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - do: @@ -807,7 +764,7 @@ teardown: history_id: "test" version: 1 nodes: - - { settings: { node: { name: "instance-000187" } }, processors_range: {min: 16.0, max: 20.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { node: { name: "instance-000187" } }, processors_range: {min: 16.0, max: 20.0}, memory: "64gb", storage: "128gb" } --- "Test processors min and max are required": - do: @@ -827,7 +784,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: { }, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: { }, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -849,7 +806,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {max: 8.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {max: 8.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -870,7 +827,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 8.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, 
processors_range: {min: 8.0}, memory: "64gb", storage: "128gb" } - do: _internal.get_desired_nodes: {} @@ -879,7 +836,7 @@ teardown: history_id: "test" version: 1 nodes: - - { settings: { node: { name: "instance-000187" } }, processors_range: {min: 8.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { node: { name: "instance-000187" } }, processors_range: {min: 8.0}, memory: "64gb", storage: "128gb" } --- "Test min processors should be less than or equal to max processors": - do: @@ -899,7 +856,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 8.0, max: 1.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: 8.0, max: 1.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -921,7 +878,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: NaN, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: NaN, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -943,7 +900,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: Infinity, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: Infinity, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -965,7 +922,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: -Infinity, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: -Infinity, memory: "64gb", storage: "128gb" } - match: { status: 
400 } - match: { error.type: x_content_parse_exception } --- @@ -987,7 +944,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: NaN, max: 1.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: NaN, max: 1.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1009,7 +966,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: Infinity, max: 1.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: Infinity, max: 1.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1031,7 +988,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: -Infinity, max: 1.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: -Infinity, max: 1.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1053,7 +1010,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 0.0, max: 1.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: 0.0, max: 1.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1075,7 +1032,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 1.0, max: NaN}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": 
"instance-000187" }, processors_range: {min: 1.0, max: NaN}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1097,7 +1054,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 1.0, max: Infinity}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: 1.0, max: Infinity}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1119,7 +1076,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 1.0, max: -Infinity}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: 1.0, max: -Infinity}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1141,7 +1098,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors_range: {min: 1.0, max: 0.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors_range: {min: 1.0, max: 0.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1163,7 +1120,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: 1.0, processors_range: {min: 1.0, max: 2.0}, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: 1.0, processors_range: {min: 1.0, max: 2.0}, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } --- @@ -1185,6 +1142,6 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": 
"instance-000187", "node.roles": "unknown,other" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187", "node.roles": "unknown,other" }, processors: 8.5, memory: "64gb", storage: "128gb" } - match: { status: 400 } - match: { error.type: x_content_parse_exception } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/11_old_format.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/11_old_format.yml new file mode 100644 index 0000000000000..ed5ca16edad4e --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/11_old_format.yml @@ -0,0 +1,283 @@ +--- +setup: + - skip: + version: " - 8.2.99, 8.12.99 - " + reason: "API added in in 8.1.0, modified in 8.3 and then again in 8.13.0" +--- +teardown: + - do: + _internal.delete_desired_nodes: {} +--- +"Test update desired nodes": + - skip: + reason: "contains is a newly added assertion" + features: contains + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - match: { replaced_existing_history_id: false } + + - do: + _internal.get_desired_nodes: {} + - match: + $body: + history_id: "test" + version: 1 + nodes: + - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 2 + body: + nodes: + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": 
"instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } + - match: { replaced_existing_history_id: false } + + - do: + _internal.get_desired_nodes: {} + + - match: { history_id: "test" } + - match: { version: 2 } + - length: { nodes: 2 } + - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } } + - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } } +--- +"Test update move to a new history id": + - skip: + reason: "contains is a newly added assertion" + features: contains + - do: + cluster.state: {} + + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187" }, processors: 8, memory: "64gb", storage: "128gb", node_version: $es_version } + - match: { replaced_existing_history_id: false } + + - do: + _internal.get_desired_nodes: {} + - match: + $body: + history_id: "test" + version: 1 + nodes: + - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + + - do: + _internal.update_desired_nodes: + history_id: "new_history" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187" }, processors: 8, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.external_id": "instance-000188" }, processors: 16, memory: "128gb", storage: "1tb", node_version: $es_version } + - match: { replaced_existing_history_id: true } + + - do: + _internal.get_desired_nodes: {} + - match: { history_id: "new_history" } + - match: { version: 1 } + - length: { nodes: 2 } + - contains: { nodes: { settings: { node: { 
external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } } + - contains: { nodes: { settings: { node: { external_id: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } } +--- +"Test delete desired nodes": + - do: + cluster.state: {} + + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - match: { replaced_existing_history_id: false } + + - do: + _internal.get_desired_nodes: {} + - match: + $body: + history_id: "test" + version: 1 + nodes: + - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + + - do: + _internal.delete_desired_nodes: {} + + - do: + catch: missing + _internal.get_desired_nodes: {} + - match: { status: 404 } +--- +"Test settings are validated": + - skip: + version: "8.9.99 - " + reason: "We started skipping setting validations in 8.10" + - do: + cluster.state: {} + + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + + - do: + catch: bad_request + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187", "http.tcp.keep_idle": -1000 }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - match: { status: 400 } + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "Nodes with ids [instance-000187] in positions [0] contain invalid settings" } + - match: { error.suppressed.0.reason: "Failed to parse value [-1000] for setting [http.tcp.keep_idle] must be >= -1" } +--- +"Test unknown 
settings are forbidden in known versions": + - skip: + version: "8.9.99 - " + reason: "We started skipping setting validations in 8.10" + - do: + cluster.state: {} + + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + + - do: + catch: bad_request + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187", "unknown_setting": -1000 }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } + - match: { status: 400 } + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "Nodes with ids [instance-000187] in positions [0] contain invalid settings" } + - match: { error.suppressed.0.reason: "unknown setting [unknown_setting] please check that any required plugins are installed, or check the breaking changes documentation for removed settings" } +--- +"Test unknown settings are allowed in future versions": + - skip: + version: "8.9.99 - " + reason: "We started skipping setting validations in 8.10" + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187", "unknown_setting": -1000 }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: "99.1.0" } + - match: { replaced_existing_history_id: false } +--- +"Test some settings can be overridden": + - skip: + version: "8.9.99 - " + reason: "We started skipping setting validations in 8.10" + - do: + cluster.state: {} + + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.version: es_version } + + - do: + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187", node.processors: 2048 }, processors: 2048, memory: "64gb", storage: "128gb", node_version: $es_version } + - match: { replaced_existing_history_id: false } +--- +"Test node 
version must be at least the current master version": + - do: + catch: bad_request + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187"}, processors: 8.0, memory: "64gb", storage: "128gb", node_version: "7.16.0" } + - match: { status: 400 } + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "Nodes with ids [instance-000187] in positions [0] contain invalid settings" } + - match: + error.suppressed.0.reason: "/Illegal\\snode\\sversion.+$/" +--- +"Test node version is required": + - do: + catch: bad_request + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: {}, processors: 8, memory: "64gb", storage: "128gb" } + - match: { status: 400 } + - match: { error.type: x_content_parse_exception } +--- +"Test node version must have content": + - do: + catch: bad_request + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187"}, processors: 64, memory: "1b", storage: "1b", node_version: " " } + - match: { status: 400 } + - match: { error.type: x_content_parse_exception } +--- +"Test node version can not be null": + - do: + catch: bad_request + _internal.update_desired_nodes: + history_id: "test" + version: 1 + body: + nodes: + - { settings: { "node.external_id": "instance-000187"}, processors: 64, memory: "1b", storage: "1b", node_version: null } + - match: { status: 400 } + - match: { error.type: x_content_parse_exception } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml index 339c0cef3e162..3723c5b2165ca 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml @@ -10,6 +10,9 @@ teardown: --- "Test dry run doesn't update empty desired nodes": + - skip: + version: " - 8.12.99" + reason: "version_node removed from version 8.13 onwards" - do: cluster.state: {} @@ -26,7 +29,7 @@ teardown: dry_run: true body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - match: { dry_run: true } @@ -37,6 +40,9 @@ teardown: --- "Test dry run doesn't update existing desired nodes": + - skip: + version: " - 8.12.99" + reason: "version_node removed from version 8.13 onwards" - do: cluster.state: {} @@ -52,7 +58,7 @@ teardown: version: 1 body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb" } - match: { replaced_existing_history_id: false } - match: { dry_run: false } @@ -63,7 +69,7 @@ teardown: history_id: "test" version: 1 nodes: - - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } - do: _internal.update_desired_nodes: @@ -72,8 +78,8 @@ teardown: dry_run: "true" body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb" } 
+ - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb" } - match: { replaced_existing_history_id: false } - match: { dry_run: true } @@ -84,7 +90,7 @@ teardown: history_id: "test" version: 1 nodes: - - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } --- "Test validation works for dry run updates": - skip: @@ -112,10 +118,11 @@ teardown: - match: { error.type: illegal_argument_exception } - match: { error.reason: "Nodes with ids [instance-000245] in positions [0] contain invalid settings" } - match: { error.suppressed.0.reason: "unknown setting [random_setting] please check that any required plugins are installed, or check the breaking changes documentation for removed settings" } - - --- "Test misspelled dry run": + - skip: + version: " - 8.12.99" + reason: "version_node removed from version 8.13 onwards" - do: cluster.state: { } @@ -133,4 +140,4 @@ teardown: diy_run: "true" body: nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } + - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml index f44461e7b8143..c69e22d274c8e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml @@ -6,6 +6,9 @@ setup: indices.create: index: test body: + settings: + index: + number_of_shards: 2 mappings: properties: name: @@ -135,6 +138,172 @@ 
setup: - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} - match: {hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0"} --- +"nested kNN search inner_hits size > 1": + - skip: + version: ' - 8.12.99' + reason: 'inner_hits on nested kNN search added in 8.13' + + - do: + index: + index: test + id: "4" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "5" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "6" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + - do: + indices.refresh: { } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 5 + inner_hits: {size: 2, fields: ["nested.paragraph_id"], _source: false} + + - match: {hits.total.value: 3} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + + - match: { hits.hits.0.fields.name.0: "moose.jpg" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 5 + num_candidates: 5 + inner_hits: {size: 2, fields: ["nested.paragraph_id"], _source: false} + + - match: {hits.total.value: 5} + # All these initial 
matches are "moose.jpg", which has 3 nested vectors, but two are closest + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.1.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.2.fields.name.0: "moose.jpg"} + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.2.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.3.fields.name.0: "moose.jpg"} + - length: { hits.hits.3.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.3.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.3.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + # Rabbit only has one passage vector + - match: {hits.hits.4.fields.name.0: "rabbit.jpg"} + - length: { hits.hits.4.inner_hits.nested.hits.hits: 1 } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + k: 3 + num_candidates: 3 + filter: {term: {name: "cow.jpg"}} + inner_hits: {size: 3, fields: ["nested.paragraph_id"], _source: false} + + - match: {hits.total.value: 1} + - match: { hits.hits.0._id: "1" } + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { 
hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "1" } +--- +"nested kNN search inner_hits & boosting": + - skip: + version: ' - 8.12.99' + reason: 'inner_hits on nested kNN search added in 8.13' + features: close_to + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 5 + inner_hits: {size: 2, fields: ["nested.paragraph_id"], _source: false} + + - close_to: { hits.hits.0._score: {value: 0.00909090, error: 0.00001} } + - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: {value: 0.00909090, error: 0.00001} } + - close_to: { hits.hits.1._score: {value: 0.0021519717, error: 0.00001} } + - close_to: { hits.hits.1.inner_hits.nested.hits.hits.0._score: {value: 0.0021519717, error: 0.00001} } + - close_to: { hits.hits.2._score: {value: 0.00001, error: 0.00001} } + - close_to: { hits.hits.2.inner_hits.nested.hits.hits.0._score: {value: 0.00001, error: 0.00001} } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 5 + boost: 2 + inner_hits: {size: 2, fields: ["nested.paragraph_id"], _source: false} + - close_to: { hits.hits.0._score: {value: 0.0181818, error: 0.00001} } + - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: {value: 0.0181818, error: 0.00001} } + - close_to: { hits.hits.1._score: {value: 0.0043039434, error: 0.00001} } + - close_to: { hits.hits.1.inner_hits.nested.hits.hits.0._score: {value: 0.0043039434, error: 0.00001} } + - close_to: { hits.hits.2._score: {value: 0.00002, error: 0.00001} } + - close_to: { hits.hits.2.inner_hits.nested.hits.hits.0._score: {value: 0.00002, error: 0.00001} } +--- "nested kNN search inner_hits & profiling": - skip: version: ' - 8.12.99' @@ -144,7 +313,6 @@ setup: index: test body: profile: true - _source: false fields: [ "name" ] knn: field: nested.vector 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml index 435291b454d08..5d07c0c8b5f9d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml @@ -186,7 +186,6 @@ setup: - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } --- - "nested kNN search post-filtered on nested fields DOES NOT work": - do: search: @@ -211,3 +210,112 @@ setup: # TODO: fix it on Lucene level so nested knn respects num_candidates # or do pre-filtering - match: {hits.total.value: 0} +--- +"nested kNN search inner_hits size > 1": + - skip: + version: ' - 8.12.99' + reason: 'inner_hits on nested kNN search added in 8.13' + + - do: + index: + index: test + id: "4" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "5" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "6" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + - do: + indices.refresh: { } + + - do: + search: + index: test + size: 3 + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector 
+ query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 5 + inner_hits: { size: 2, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 5} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + + + - do: + search: + index: test + size: 5 + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 5 + inner_hits: { size: 2, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 5} + # All these initial matches are "moose.jpg", which has 3 nested vectors, but two are closest + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.1.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.2.fields.name.0: "moose.jpg"} + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.2.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.3.fields.name.0: "moose.jpg"} + - length: { hits.hits.3.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.3.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.3.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + # Rabbit only has one 
passage vector + - match: {hits.hits.4.fields.name.0: "rabbit.jpg"} + - length: { hits.hits.4.inner_hits.nested.hits.hits: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml index 948a6e04a128b..433592a32f963 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -249,7 +249,7 @@ setup: id: "1" body: name: cow.jpg - vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + vector: [1, 2, 3, 4, 5] - do: index: @@ -257,7 +257,7 @@ setup: id: "2" body: name: moose.jpg - vector: [-0.5, 10.0, -13, 14.8, 15.0] + vector: [1, 1, 1, 1, 1] - do: index: @@ -265,7 +265,7 @@ setup: id: "3" body: name: rabbit.jpg - vector: [0.5, 111.3, -13.0, 14.8, -156.0] + vector: [1, 2, 2, 2, 2] # We force merge into a single segment to make sure scores are more uniform # Each segment can have a different quantization error, which can affect scores and mip is especially sensitive to this @@ -286,7 +286,7 @@ setup: num_candidates: 3 k: 3 field: vector - query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + query_vector: [1, 2, 3, 4, 5] - length: {hits.hits: 3} @@ -303,7 +303,7 @@ setup: num_candidates: 3 k: 3 field: vector - query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + query_vector: [1, 2, 3, 4, 5] filter: { "term": { "name": "moose.jpg" } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml index a2d19a16ab85f..4f79cbabbbeb8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -515,3 +515,15 @@ setup: - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._id: "4" } - gte: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._seq_no: 0 } - gte: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._primary_term: 1 } +--- +"Test collapse with inner_hits and missing name fails": + - skip: + version: " - 8.12.99" + reason: fixed in 8.13 + - do: + catch: bad_request + search: + index: test + body: + collapse: { field: numeric_group, inner_hits: { size: 1 } } + sort: [{ sort: desc }] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml index 7e78450931df5..71ea09d33e81d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml @@ -23,6 +23,10 @@ setup: - do: indices.create: index: test2 + body: + settings: + index: + number_of_shards: 2 - do: index: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java index 45865ddd35ced..b0238922c206e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -16,7 +16,7 @@ import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.ReferenceDocs; -import org.elasticsearch.common.logging.ChunkedLoggingStreamTests; +import org.elasticsearch.common.logging.ChunkedLoggingStreamTestUtils; import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.test.ESIntegTestCase; @@ -188,7 +188,7 @@ public void testTimestampAndParams() { public void testLogLocalHotThreads() { final var level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR); assertThat( - ChunkedLoggingStreamTests.getDecodedLoggedBody( + ChunkedLoggingStreamTestUtils.getDecodedLoggedBody( logger, level, getTestName(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java index a3c1304cfbae9..38fe1f8f918f8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java @@ -9,9 +9,9 @@ package org.elasticsearch.action.admin.cluster.desirednodes; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.desirednodes.VersionConflictException; @@ -59,23 +59,19 @@ public void testUpdateDesiredNodes() { } public void testDryRunUpdateDoesNotUpdateEmptyDesiredNodes() { - UpdateDesiredNodesResponse dryRunResponse = updateDesiredNodes( - randomDryRunUpdateDesiredNodesRequest(Version.CURRENT, Settings.EMPTY) - ); + UpdateDesiredNodesResponse dryRunResponse = updateDesiredNodes(randomDryRunUpdateDesiredNodesRequest(Settings.EMPTY)); assertThat(dryRunResponse.dryRun(), is(equalTo(true))); expectThrows(ResourceNotFoundException.class, 
this::getLatestDesiredNodes); } public void testDryRunUpdateDoesNotUpdateExistingDesiredNodes() { - UpdateDesiredNodesResponse response = updateDesiredNodes(randomUpdateDesiredNodesRequest(Version.CURRENT, Settings.EMPTY)); + UpdateDesiredNodesResponse response = updateDesiredNodes(randomUpdateDesiredNodesRequest(Settings.EMPTY)); assertThat(response.dryRun(), is(equalTo(false))); DesiredNodes desiredNodes = getLatestDesiredNodes(); - UpdateDesiredNodesResponse dryRunResponse = updateDesiredNodes( - randomDryRunUpdateDesiredNodesRequest(Version.CURRENT, Settings.EMPTY) - ); + UpdateDesiredNodesResponse dryRunResponse = updateDesiredNodes(randomDryRunUpdateDesiredNodesRequest(Settings.EMPTY)); assertThat(dryRunResponse.dryRun(), is(equalTo(true))); assertEquals(getLatestDesiredNodes(), desiredNodes); @@ -182,7 +178,6 @@ public void testAtLeastOneMaterNodeIsExpected() { public void testUnknownSettingsAreAllowedInFutureVersions() { final var updateDesiredNodesRequest = randomUpdateDesiredNodesRequest( - Version.fromString("99.9.0"), Settings.builder().put("desired_nodes.random_setting", Integer.MIN_VALUE).build() ); @@ -202,11 +197,7 @@ public void testNodeProcessorsGetValidatedWithDesiredNodeProcessors() { randomList( 1, 20, - () -> randomDesiredNode( - Version.CURRENT, - Settings.builder().put(NODE_PROCESSORS_SETTING.getKey(), numProcessors).build(), - numProcessors - ) + () -> randomDesiredNode(Settings.builder().put(NODE_PROCESSORS_SETTING.getKey(), numProcessors).build(), numProcessors) ), false ); @@ -223,19 +214,6 @@ public void testNodeProcessorsGetValidatedWithDesiredNodeProcessors() { } } - public void testNodeVersionIsValidated() { - final var updateDesiredNodesRequest = randomUpdateDesiredNodesRequest(Version.CURRENT.previousMajor(), Settings.EMPTY); - - final IllegalArgumentException exception = expectThrows( - IllegalArgumentException.class, - () -> updateDesiredNodes(updateDesiredNodesRequest) - ); - assertThat(exception.getMessage(), 
containsString("Nodes with ids")); - assertThat(exception.getMessage(), containsString("contain invalid settings")); - assertThat(exception.getSuppressed().length > 0, is(equalTo(true))); - assertThat(exception.getSuppressed()[0].getMessage(), containsString("Illegal node version")); - } - public void testUpdateDesiredNodesTasksAreBatchedCorrectly() throws Exception { final Runnable unblockClusterStateUpdateThread = blockClusterStateUpdateThread(); @@ -271,9 +249,7 @@ public void testDeleteDesiredNodesTasksAreBatchedCorrectly() throws Exception { final List> deleteDesiredNodesFutures = new ArrayList<>(15); for (int i = 0; i < 15; i++) { - deleteDesiredNodesFutures.add( - client().execute(TransportDeleteDesiredNodesAction.TYPE, new TransportDeleteDesiredNodesAction.Request()) - ); + deleteDesiredNodesFutures.add(client().execute(TransportDeleteDesiredNodesAction.TYPE, new AcknowledgedRequest.Plain())); } for (ActionFuture future : deleteDesiredNodesFutures) { @@ -327,30 +303,25 @@ private UpdateDesiredNodesRequest randomUpdateDesiredNodesRequest() { } private UpdateDesiredNodesRequest randomUpdateDesiredNodesRequest(Settings settings) { - return randomUpdateDesiredNodesRequest(Version.CURRENT, settings); - } - - private UpdateDesiredNodesRequest randomUpdateDesiredNodesRequest(Version version, Settings settings) { return new UpdateDesiredNodesRequest( UUIDs.randomBase64UUID(), randomIntBetween(2, 20), - randomList(2, 10, () -> randomDesiredNode(version, settings)), + randomList(2, 10, () -> randomDesiredNode(settings)), false ); } - private UpdateDesiredNodesRequest randomDryRunUpdateDesiredNodesRequest(Version version, Settings settings) { + private UpdateDesiredNodesRequest randomDryRunUpdateDesiredNodesRequest(Settings settings) { return new UpdateDesiredNodesRequest( UUIDs.randomBase64UUID(), randomIntBetween(2, 20), - randomList(2, 10, () -> randomDesiredNode(version, settings)), + randomList(2, 10, () -> randomDesiredNode(settings)), true ); } private void 
deleteDesiredNodes() { - final TransportDeleteDesiredNodesAction.Request request = new TransportDeleteDesiredNodesAction.Request(); - client().execute(TransportDeleteDesiredNodesAction.TYPE, request).actionGet(); + client().execute(TransportDeleteDesiredNodesAction.TYPE, new AcknowledgedRequest.Plain()).actionGet(); } private DesiredNodes getLatestDesiredNodes() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java index b20f658a01510..e1804368c2cad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.GroupedActionListener; @@ -166,7 +165,7 @@ public void testBanOnlyNodesWithOutstandingDescendantTasks() throws Exception { ActionFuture rootTaskFuture = client().execute(TransportTestAction.ACTION, rootRequest); Set pendingRequests = allowPartialRequest(rootRequest); TaskId rootTaskId = getRootTaskId(rootRequest); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(rootTaskId) .waitForCompletion(true) .execute(); @@ -215,10 +214,10 @@ public void testCancelTaskMultipleTimes() throws Exception { ActionFuture mainTaskFuture = client().execute(TransportTestAction.ACTION, rootRequest); TaskId 
taskId = getRootTaskId(rootRequest); allowPartialRequest(rootRequest); - CancelTasksResponse resp = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).waitForCompletion(false).get(); + ListTasksResponse resp = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).waitForCompletion(false).get(); assertThat(resp.getTaskFailures(), empty()); assertThat(resp.getNodeFailures(), empty()); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(taskId) .waitForCompletion(true) .execute(); @@ -226,7 +225,7 @@ public void testCancelTaskMultipleTimes() throws Exception { allowEntireRequest(rootRequest); assertThat(cancelFuture.actionGet().getTaskFailures(), empty()); waitForRootTask(mainTaskFuture, false); - CancelTasksResponse cancelError = clusterAdmin().prepareCancelTasks() + ListTasksResponse cancelError = clusterAdmin().prepareCancelTasks() .setTargetTaskId(taskId) .waitForCompletion(randomBoolean()) .get(); @@ -245,7 +244,7 @@ public void testDoNotWaitForCompletion() throws Exception { allowPartialRequest(rootRequest); } boolean waitForCompletion = randomBoolean(); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(taskId) .waitForCompletion(waitForCompletion) .execute(); @@ -311,7 +310,7 @@ public void testRemoveBanParentsOnDisconnect() throws Exception { client().execute(TransportTestAction.ACTION, rootRequest); Set pendingRequests = allowPartialRequest(rootRequest); TaskId rootTaskId = getRootTaskId(rootRequest); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(rootTaskId) .waitForCompletion(true) .execute(); @@ -505,7 +504,7 @@ public void writeTo(StreamOutput out) throws IOException { public static class TransportTestAction extends HandledTransportAction { - 
public static ActionType ACTION = new ActionType<>("internal::test_action", TestResponse::new); + public static ActionType ACTION = new ActionType<>("internal::test_action"); private final TransportService transportService; private final NodeClient client; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 21497b2e6fcfb..884f6dbcd677e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -503,7 +502,7 @@ public void testTasksCancellation() throws Exception { ); logger.info("--> cancelling the main test task"); - CancelTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setActions(TEST_TASK_ACTION.name()).get(); + ListTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setActions(TEST_TASK_ACTION.name()).get(); assertEquals(1, cancelTasksResponse.getTasks().size()); expectThrows(TaskCancelledException.class, future); @@ -722,7 +721,7 @@ public void testTasksWaitForAllTask() throws Exception { .map(PersistentTasksCustomMetadata.PersistentTask::getExecutorNode) .collect(Collectors.toSet()); // Spin up a request to wait for all tasks in the cluster to make sure it doesn't cause an 
infinite loop - ListTasksResponse response = clusterAdmin().prepareListTasks().setWaitForCompletion(true).setTimeout(timeValueSeconds(10)).get(); + ListTasksResponse response = clusterAdmin().prepareListTasks().setWaitForCompletion(true).setTimeout(timeValueSeconds(1)).get(); // We expect the nodes that are running always-running-tasks to report FailedNodeException and fail to list their tasks assertThat(response.getNodeFailures().size(), equalTo(nodesRunningTasks.size())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java index 955ec4a0bbc99..4a076cb3b6e66 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java @@ -142,7 +142,7 @@ protected Collection> getPlugins() { return List.of(TestPlugin.class); } - private static final ActionType TEST_ACTION = ActionType.emptyResponse(TestTransportAction.NAME); + private static final ActionType TEST_ACTION = new ActionType<>(TestTransportAction.NAME); public static class TestPlugin extends Plugin implements ActionPlugin { volatile CyclicBarrier barrier; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java index 4d37f75894d56..e0805148a47e3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -33,7 +34,7 @@ public void testClearIndicesCacheWithBlocks() { for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE)) { try { enableIndexBlock("test", blockSetting); - ClearIndicesCacheResponse clearIndicesCacheResponse = indicesAdmin().prepareClearCache("test") + BroadcastResponse clearIndicesCacheResponse = indicesAdmin().prepareClearCache("test") .setFieldDataCache(true) .setQueryCache(true) .setFieldDataCache(true) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java index 69d4f7aaef329..4e2fade87196f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.flush; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -44,7 +45,7 @@ public void testFlushWithBlocks() { )) { try { enableIndexBlock("test", blockSetting); - FlushResponse response = indicesAdmin().prepareFlush("test").get(); + BroadcastResponse response = indicesAdmin().prepareFlush("test").get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); } finally { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java index a3474afc96c51..b5d8ef0308b91 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.forcemerge; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -50,7 +51,7 @@ public void testForceMergeWithBlocks() { for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY_ALLOW_DELETE)) { try { enableIndexBlock("test", blockSetting); - ForceMergeResponse response = indicesAdmin().prepareForceMerge("test").get(); + BaseBroadcastResponse response = indicesAdmin().prepareForceMerge("test").get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); } finally { @@ -70,7 +71,7 @@ public void testForceMergeWithBlocks() { // Merging all indices is blocked when the cluster is read-only try { - ForceMergeResponse response = indicesAdmin().prepareForceMerge().get(); + BaseBroadcastResponse response = indicesAdmin().prepareForceMerge().get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java index 229558e9f4242..22bc37b2fb946 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.action.admin.indices.forcemerge; -import 
org.elasticsearch.action.admin.indices.flush.FlushResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -51,13 +51,13 @@ public void testForceMergeUUIDConsistent() throws IOException { assertThat(getForceMergeUUID(primary), nullValue()); assertThat(getForceMergeUUID(replica), nullValue()); - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge(index).setMaxNumSegments(1).get(); + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge(index).setMaxNumSegments(1).get(); assertThat(forceMergeResponse.getFailedShards(), is(0)); assertThat(forceMergeResponse.getSuccessfulShards(), is(2)); // Force flush to force a new commit that contains the force flush UUID - final FlushResponse flushResponse = indicesAdmin().prepareFlush(index).setForce(true).get(); + final BroadcastResponse flushResponse = indicesAdmin().prepareFlush(index).setForce(true).get(); assertThat(flushResponse.getFailedShards(), is(0)); assertThat(flushResponse.getSuccessfulShards(), is(2)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java index 41abfc1219199..2067038e0fdd2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.refresh; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -39,7 +40,7 @@ public void 
testRefreshWithBlocks() { )) { try { enableIndexBlock("test", blockSetting); - RefreshResponse response = indicesAdmin().prepareRefresh("test").get(); + BroadcastResponse response = indicesAdmin().prepareRefresh("test").get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); } finally { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java index 6a2ab41fae5d6..3768daaa20ffc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java @@ -12,12 +12,12 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; -import java.util.Collections; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -67,8 +67,8 @@ public void testBulkRejectionLoadWithBackoff() throws Throwable { private void executeBulkRejectionLoad(int maxRetries, boolean rejectedExecutionExpected) throws Throwable { int numberOfAsyncOps = randomIntBetween(600, 700); final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps); - final Set successfulResponses = Collections.newSetFromMap(new ConcurrentHashMap<>()); - final Set> failedResponses = Collections.newSetFromMap(new ConcurrentHashMap<>()); + final Set successfulResponses = ConcurrentCollections.newConcurrentSet(); + final Set> failedResponses = ConcurrentCollections.newConcurrentSet(); 
assertAcked(prepareCreate(INDEX_NAME)); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 8bd3a6cf02868..cfdaacfae9cfb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -10,12 +10,12 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; -import java.util.Collections; import java.util.Iterator; import java.util.Map; import java.util.Set; @@ -61,7 +61,7 @@ private void executeBulkRejectionLoad(BackoffPolicy backoffPolicy, boolean rejec final CorrelatingBackoffPolicy internalPolicy = new CorrelatingBackoffPolicy(backoffPolicy); int numberOfAsyncOps = randomIntBetween(600, 700); final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps); - final Set responses = Collections.newSetFromMap(new ConcurrentHashMap<>()); + final Set responses = ConcurrentCollections.newConcurrentSet(); assertAcked(prepareCreate(INDEX_NAME)); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/CCSPointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/CCSPointInTimeIT.java index eff681f1f281b..4bd97f772e4c3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/CCSPointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/CCSPointInTimeIT.java @@ -13,6 +13,7 @@ import 
org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -21,6 +22,7 @@ import org.elasticsearch.search.query.ThrowingQueryBuilder; import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.transport.RemoteClusterAware; +import org.hamcrest.MatcherAssert; import java.util.ArrayList; import java.util.Collection; @@ -74,11 +76,15 @@ public void testBasic() { final Client localClient = client(LOCAL_CLUSTER); final Client remoteClient = client(REMOTE_CLUSTER); int localNumDocs = randomIntBetween(10, 50); - assertAcked(localClient.admin().indices().prepareCreate("local_test")); + assertAcked( + localClient.admin().indices().prepareCreate("local_test").setSettings(Settings.builder().put("index.number_of_shards", 3)) + ); indexDocs(localClient, "local_test", localNumDocs); int remoteNumDocs = randomIntBetween(10, 50); - assertAcked(remoteClient.admin().indices().prepareCreate("remote_test")); + assertAcked( + remoteClient.admin().indices().prepareCreate("remote_test").setSettings(Settings.builder().put("index.number_of_shards", 3)) + ); indexDocs(remoteClient, "remote_test", remoteNumDocs); boolean includeLocalIndex = randomBoolean(); List indices = new ArrayList<>(); @@ -107,19 +113,120 @@ public void testBasic() { SearchResponse.Clusters clusters = resp.getClusters(); int expectedNumClusters = 1 + (includeLocalIndex ? 
1 : 0); - assertThat(clusters.getTotal(), equalTo(expectedNumClusters)); - assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(expectedNumClusters)); - assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED), equalTo(0)); + MatcherAssert.assertThat(clusters.getTotal(), equalTo(expectedNumClusters)); + MatcherAssert.assertThat( + clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), + equalTo(expectedNumClusters) + ); + MatcherAssert.assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED), equalTo(0)); if (includeLocalIndex) { SearchResponse.Cluster localCluster = clusters.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); assertNotNull(localCluster); - assertOneSuccessfulShard(localCluster); + assertAllSuccessfulShards(localCluster, 3, 0); } SearchResponse.Cluster remoteCluster = clusters.getCluster(REMOTE_CLUSTER); assertNotNull(remoteCluster); - assertOneSuccessfulShard(remoteCluster); + assertAllSuccessfulShards(remoteCluster, 3, 0); + } + ); + } finally { + closePointInTime(pitId); + } + } + + public void testOpenPITWithIndexFilter() { + final Client localClient = client(LOCAL_CLUSTER); + final Client remoteClient = client(REMOTE_CLUSTER); + + assertAcked( + localClient.admin().indices().prepareCreate("local_test").setSettings(Settings.builder().put("index.number_of_shards", 3)) + ); + localClient.prepareIndex("local_test").setId("1").setSource("value", "1", "@timestamp", "2024-03-01").get(); + localClient.prepareIndex("local_test").setId("2").setSource("value", "2", "@timestamp", "2023-12-01").get(); + localClient.admin().indices().prepareRefresh("local_test").get(); + + assertAcked( + remoteClient.admin().indices().prepareCreate("remote_test").setSettings(Settings.builder().put("index.number_of_shards", 3)) + ); + remoteClient.prepareIndex("remote_test").setId("1").setSource("value", "1", "@timestamp", "2024-01-01").get(); + 
remoteClient.prepareIndex("remote_test").setId("2").setSource("value", "2", "@timestamp", "2023-12-01").get(); + remoteClient.admin().indices().prepareRefresh("remote_test").get(); + + List indices = new ArrayList<>(); + indices.add(randomFrom("*", "local_*", "local_test")); + indices.add(randomFrom("*:*", "remote_cluster:*", "remote_cluster:remote_test")); + + OpenPointInTimeRequest request = new OpenPointInTimeRequest(indices.toArray(new String[0])); + request.keepAlive(TimeValue.timeValueMinutes(2)); + request.indexFilter(new RangeQueryBuilder("@timestamp").gte("2023-12-15")); + final OpenPointInTimeResponse response = client().execute(TransportOpenPointInTimeAction.TYPE, request).actionGet(); + String pitId = response.getPointInTimeId(); + + if (randomBoolean()) { + localClient.prepareIndex("local_test").setId("local_new").setSource().get(); + localClient.admin().indices().prepareRefresh().get(); + } + if (randomBoolean()) { + remoteClient.prepareIndex("remote_test").setId("remote_new").setSource().get(); + remoteClient.admin().indices().prepareRefresh().get(); + } + + try { + assertNoFailuresAndResponse( + localClient.prepareSearch() + .setPreference(null) + .setQuery(new MatchAllQueryBuilder()) + .setPointInTime(new PointInTimeBuilder(pitId)), + resp -> { + assertHitCount(resp, 2); + + SearchResponse.Clusters clusters = resp.getClusters(); + int expectedNumClusters = 2; + MatcherAssert.assertThat(clusters.getTotal(), equalTo(expectedNumClusters)); + MatcherAssert.assertThat( + clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), + equalTo(expectedNumClusters) + ); + MatcherAssert.assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED), equalTo(0)); + + // both indices (local and remote) have shards, but there is a single shard left after can match + SearchResponse.Cluster localCluster = clusters.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + assertNotNull(localCluster); + 
assertAllSuccessfulShards(localCluster, 1, 0); + SearchResponse.Cluster remoteCluster = clusters.getCluster(REMOTE_CLUSTER); + assertNotNull(remoteCluster); + assertAllSuccessfulShards(remoteCluster, 1, 0); + } + ); + + assertNoFailuresAndResponse( + localClient.prepareSearch() + .setPreference(null) + // test the scenario where search also runs can match and filters additional shards out + .setPreFilterShardSize(1) + .setQuery(new RangeQueryBuilder("@timestamp").gte("2024-02-01")) + .setPointInTime(new PointInTimeBuilder(pitId)), + resp -> { + assertHitCount(resp, 1); + + SearchResponse.Clusters clusters = resp.getClusters(); + int expectedNumClusters = 2; + MatcherAssert.assertThat(clusters.getTotal(), equalTo(expectedNumClusters)); + MatcherAssert.assertThat( + clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), + equalTo(expectedNumClusters) + ); + MatcherAssert.assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED), equalTo(0)); + + // both indices (local and remote) have shards, but there is a single shard left after can match + SearchResponse.Cluster localCluster = clusters.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + assertNotNull(localCluster); + assertAllSuccessfulShards(localCluster, 1, 0); + SearchResponse.Cluster remoteCluster = clusters.getCluster(REMOTE_CLUSTER); + assertNotNull(remoteCluster); + assertAllSuccessfulShards(remoteCluster, 1, 1); } ); } finally { @@ -180,16 +287,6 @@ public void testFailuresOnOneShardsWithPointInTime() throws ExecutionException, } } - private static void assertOneSuccessfulShard(SearchResponse.Cluster cluster) { - assertThat(cluster.getTotalShards(), equalTo(1)); - assertThat(cluster.getSuccessfulShards(), equalTo(1)); - assertThat(cluster.getFailedShards(), equalTo(0)); - assertThat(cluster.getFailures().size(), equalTo(0)); - assertThat(cluster.getStatus(), equalTo(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertThat(cluster.getTook().millis(), 
greaterThanOrEqualTo(0L)); - assertFalse(cluster.isTimedOut()); - } - private static void assertOneFailedShard(SearchResponse.Cluster cluster, int totalShards) { assertThat(cluster.getSuccessfulShards(), equalTo(totalShards - 1)); assertThat(cluster.getFailedShards(), equalTo(1)); @@ -200,6 +297,17 @@ private static void assertOneFailedShard(SearchResponse.Cluster cluster, int tot assertFalse(cluster.isTimedOut()); } + private static void assertAllSuccessfulShards(SearchResponse.Cluster cluster, int numShards, int skippedShards) { + assertThat(cluster.getTotalShards(), equalTo(numShards)); + assertThat(cluster.getSkippedShards(), equalTo(skippedShards)); + assertThat(cluster.getSuccessfulShards(), equalTo(numShards)); + assertThat(cluster.getFailedShards(), equalTo(0)); + assertThat(cluster.getFailures().size(), equalTo(0)); + assertThat(cluster.getStatus(), equalTo(SearchResponse.Cluster.Status.SUCCESSFUL)); + assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertFalse(cluster.isTimedOut()); + } + private String openPointInTime(String[] indices, TimeValue keepAlive) { OpenPointInTimeRequest request = new OpenPointInTimeRequest(indices).keepAlive(keepAlive); final OpenPointInTimeResponse response = client().execute(TransportOpenPointInTimeAction.TYPE, request).actionGet(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index dd71b82c106a8..5435389452a51 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -41,13 +41,13 @@ import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionContext; -import 
org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorBase; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -280,7 +280,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); assertResponse(client().search(searchRequest), searchResponse -> { assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(1, longTerms.getBuckets().size()); }); @@ -296,7 +296,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { ); assertResponse(client().search(searchRequest), searchResponse -> { assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(2, longTerms.getBuckets().size()); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java index 5ee25850fcc23..b89cea7dff089 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java @@ -82,7 +82,7 @@ public Response(StreamInput in) throws IOException { public static class TestAction extends TransportReplicationAction { private static final String ACTION_NAME = "internal:test-replication-action"; - private static final ActionType TYPE = new ActionType<>(ACTION_NAME, Response::new); + private static final ActionType TYPE = new ActionType<>(ACTION_NAME); @Inject public TestAction( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 9661f4ebb966d..cf8decc5655ec 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -984,7 +984,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException, List tags = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { tags.add("tag_" + i); - builders.add(prepareIndex("test").setId(i + "").setSource("tags", tags)); + builders.add(prepareIndex("test").setId(i + "").setSource("tags", List.copyOf(tags))); } indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 48ba897ebb76c..cc930cdad5950 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -166,11 +166,11 @@ public void testClusterInfoServiceCollectsInformation() { assertThat("some shard sizes are populated", shardSizes.values().size(), greaterThan(0)); for (DiskUsage usage : leastUsages.values()) { logger.info("--> usage: {}", usage); - assertThat("usage has be retrieved", usage.getFreeBytes(), greaterThan(0L)); + assertThat("usage has be retrieved", usage.freeBytes(), greaterThan(0L)); } for (DiskUsage usage : mostUsages.values()) { logger.info("--> usage: {}", usage); - assertThat("usage has be retrieved", usage.getFreeBytes(), greaterThan(0L)); + assertThat("usage has be retrieved", usage.freeBytes(), greaterThan(0L)); } for (Long size : shardSizes.values()) { logger.info("--> shard size: {}", size); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index b869b3a90fbce..409fbdd70333e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.cluster.version.CompatibilityVersionsUtils; import org.elasticsearch.common.UUIDs; @@ -63,6 +62,7 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.cluster.metadata.AliasMetadata.newAliasMetadataBuilder; import static org.elasticsearch.cluster.routing.RandomShardRoutingMutator.randomChange; +import 
static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassignedInfo; import static org.elasticsearch.test.XContentTestUtils.convertToMap; import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder; @@ -304,7 +304,7 @@ private IndexRoutingTable randomIndexRoutingTable(String index, String[] nodeIds } String nodeId = randomFrom(availableNodeIds); availableNodeIds.remove(nodeId); - indexShard.addShard(TestShardRouting.newShardRouting(index, i, nodeId, null, j == 0, newState, unassignedInfo)); + indexShard.addShard(shardRoutingBuilder(index, i, nodeId, j == 0, newState).withUnassignedInfo(unassignedInfo).build()); } builder.addIndexShard(indexShard); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/DesiredNodesSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/DesiredNodesSnapshotsIT.java index 460b728d64f0e..c024d7ffb9772 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/DesiredNodesSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/DesiredNodesSnapshotsIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.cluster; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.desirednodes.GetDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; @@ -70,8 +69,7 @@ private UpdateDesiredNodesRequest randomUpdateDesiredNodesRequest() { Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build(), randomIntBetween(1, 10), ByteSizeValue.ofGb(randomIntBetween(16, 64)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - Version.CURRENT + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ) ), false diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java index d21ec3e343943..d8c91d770437f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.cluster; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -189,7 +189,7 @@ public void testNoMasterActions() throws Exception { internalCluster().clearDisruptionScheme(true); } - void checkUpdateAction(boolean autoCreateIndex, TimeValue timeout, ActionRequestBuilder builder) { + void checkUpdateAction(boolean autoCreateIndex, TimeValue timeout, RequestBuilder builder) { // we clean the metadata when loosing a master, therefore all operations on indices will auto create it, if allowed try { builder.get(); @@ -204,7 +204,7 @@ void checkUpdateAction(boolean autoCreateIndex, TimeValue timeout, ActionRequest } } - void checkWriteAction(ActionRequestBuilder builder) { + void checkWriteAction(RequestBuilder builder) { try { builder.get(); fail("Expected ClusterBlockException"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java index 3869952bf3b7e..9fe9e004d0b7c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java @@ -12,9 +12,9 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -130,7 +130,7 @@ public void onFailure(Exception e) {} } private ActionFuture executeAndCancelCommittedPublication( - ActionRequestBuilder req + RequestBuilder req ) throws Exception { // Wait for no publication in progress to not accidentally cancel a publication different from the one triggered by the given // request. diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java index 2c15ac9153ba2..efe3b097cae20 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RestHandlerNodesIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -38,6 +39,7 @@ public static class TestPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry 
namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java index 8843e7ff39bc6..895a60133251f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java @@ -12,12 +12,12 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.refresh.TransportUnpromotableShardRefreshAction; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.TransportClosePointInTimeAction; import org.elasticsearch.action.search.TransportOpenPointInTimeAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -699,7 +699,7 @@ public void testRefreshFailsIfUnpromotableDisconnects() throws Exception { }); } - RefreshResponse response = indicesAdmin().prepareRefresh(INDEX_NAME).get(); + BroadcastResponse response = indicesAdmin().prepareRefresh(INDEX_NAME).get(); assertThat( "each unpromotable replica shard should be added to the shard failures", response.getFailedShards(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java index c044fafe31efc..a998fafd517ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java @@ -196,7 +196,7 @@ public void testRestoreSnapshotAllocationDoesNotExceedWatermarkWithMultipleShard // reduce disk size of node 0 so that only 1 of 2 smallest shards can be allocated var usableSpace = shardSizes.sizes().get(1).size(); - getTestFileStore(dataNodeName).setTotalSpace(usableSpace + WATERMARK_BYTES + 1L); + getTestFileStore(dataNodeName).setTotalSpace(usableSpace + WATERMARK_BYTES); refreshDiskUsage(); final RestoreSnapshotResponse restoreSnapshotResponse = clusterAdmin().prepareRestoreSnapshot("repo", "snap") @@ -301,7 +301,7 @@ private void refreshDiskUsage() { .getNodeMostAvailableDiskUsages() .values() .stream() - .allMatch(e -> e.getFreeBytes() > WATERMARK_BYTES)) { + .allMatch(e -> e.freeBytes() > WATERMARK_BYTES)) { assertAcked(clusterAdmin().prepareReroute()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index d3001f485846e..709f6b866ba28 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -10,17 +10,15 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; 
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESIntegTestCase; @@ -63,7 +61,7 @@ public void testIndexActions() throws Exception { assertThat(indexResponse.getIndex(), equalTo(getConcreteIndexName())); assertThat(indexResponse.getId(), equalTo("1")); logger.info("Refreshing"); - RefreshResponse refreshResponse = refresh(); + BroadcastResponse refreshResponse = refresh(); assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards)); logger.info("--> index exists?"); @@ -72,7 +70,7 @@ public void testIndexActions() throws Exception { assertThat(indexExists("test1234565"), equalTo(false)); logger.info("Clearing cache"); - ClearIndicesCacheResponse clearIndicesCacheResponse = indicesAdmin().clearCache( + BroadcastResponse clearIndicesCacheResponse = indicesAdmin().clearCache( new ClearIndicesCacheRequest("test").fieldDataCache(true).queryCache(true) ).actionGet(); assertNoFailures(clearIndicesCacheResponse); @@ -80,7 +78,7 @@ public void testIndexActions() throws Exception { logger.info("Force Merging"); waitForRelocation(ClusterHealthStatus.GREEN); - ForceMergeResponse mergeResponse = forceMerge(); + BaseBroadcastResponse mergeResponse = forceMerge(); assertThat(mergeResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards)); GetResponse getResult; @@ 
-130,7 +128,7 @@ public void testIndexActions() throws Exception { client().index(new IndexRequest("test").id("2").source(source("2", "test2"))).actionGet(); logger.info("Flushing"); - FlushResponse flushResult = indicesAdmin().prepareFlush("test").get(); + BroadcastResponse flushResult = indicesAdmin().prepareFlush("test").get(); assertThat(flushResult.getSuccessfulShards(), equalTo(numShards.totalNumShards)); assertThat(flushResult.getFailedShards(), equalTo(0)); logger.info("Refreshing"); @@ -220,7 +218,7 @@ public void testBulk() throws Exception { assertThat(bulkResponse.getItems()[5].getIndex(), equalTo(getConcreteIndexName())); waitForRelocation(ClusterHealthStatus.GREEN); - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); assertNoFailures(refreshResponse); assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java index d4fe2fcb4d4c1..c9809574002c8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; @@ -18,6 +17,7 @@ import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; @@ -641,7 +641,7 @@ public void testGetFieldsComplexField() throws Exception { ensureGreen(); logger.info("flushing"); - FlushResponse flushResponse = indicesAdmin().prepareFlush("my-index").setForce(true).get(); + BroadcastResponse flushResponse = indicesAdmin().prepareFlush("my-index").setForce(true).get(); if (flushResponse.getSuccessfulShards() == 0) { StringBuilder sb = new StringBuilder("failed to flush at least one shard. total shards [").append( flushResponse.getTotalShards() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java index c0263e273354f..ec9373120f491 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -92,7 +92,7 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.index.shard.IndexShardTestCase.getTranslog; import static org.elasticsearch.index.shard.IndexShardTestCase.recoverFromStore; import static org.elasticsearch.test.LambdaMatchers.falseWith; @@ -633,14 +633,12 @@ public static final IndexShard newIndexShard( } private static ShardRouting getInitializingShardRouting(ShardRouting existingShardRouting) { - ShardRouting shardRouting = newShardRouting( + ShardRouting shardRouting = shardRoutingBuilder( existingShardRouting.shardId(), existingShardRouting.currentNodeId(), - null, 
existingShardRouting.primary(), - ShardRoutingState.INITIALIZING, - existingShardRouting.allocationId() - ); + ShardRoutingState.INITIALIZING + ).withAllocationId(existingShardRouting.allocationId()).build(); shardRouting = shardRouting.updateUnassigned( new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, "fake recovery"), RecoverySource.ExistingStoreRecoverySource.INSTANCE diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java index 67e8d2fd75d65..fb22aaa3747c2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -9,9 +9,9 @@ package org.elasticsearch.indices; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.index.IndexNotFoundException; @@ -47,10 +47,7 @@ public void setNow() { * of failing when index resolution with `now` is one day off, this method wraps calls with the assumption that * the day did not change during the test run. 
*/ - public void dateSensitiveGet( - ActionRequestBuilder builder, - Consumer consumer - ) { + public void dateSensitiveGet(RequestBuilder builder, Consumer consumer) { Runnable dayChangeAssumption = () -> assumeTrue( "day changed between requests", ZonedDateTime.now(ZoneOffset.UTC).getDayOfYear() == now.getDayOfYear() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java index 3dd9feff9ce25..1c715beb04356 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java @@ -7,7 +7,7 @@ */ package org.elasticsearch.indices; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.BigArrays; @@ -99,7 +99,7 @@ public void testDeletesAloneCanTriggerRefresh() throws Exception { prepareIndex("index").setId(Integer.toString(i)).setSource("field", "value").get(); } // Force merge so we know all merges are done before we start deleting: - ForceMergeResponse r = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).get(); + BaseBroadcastResponse r = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).get(); assertNoFailures(r); final RefreshStats refreshStats = shard.refreshStats(); for (int i = 0; i < 100; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 0b99e3ba3ffcf..62e6cb59994b2 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.indices; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; @@ -132,7 +132,7 @@ public void testQueryRewrite() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); ensureSearchable("index"); @@ -202,7 +202,7 @@ public void testQueryRewriteMissingValues() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); ensureSearchable("index"); @@ -269,7 +269,7 @@ public void testQueryRewriteDates() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that 
would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); ensureSearchable("index"); @@ -343,7 +343,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { assertCacheState(client, "index-3", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = client.admin() + BroadcastResponse forceMergeResponse = client.admin() .indices() .prepareForceMerge("index-1", "index-2", "index-3") .setFlush(true) @@ -424,7 +424,7 @@ public void testCanCache() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); ensureSearchable("index"); @@ -529,7 +529,7 @@ public void testCacheWithFilteredAlias() { ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); client.prepareIndex("index").setId("1").setRouting("1").setSource("created_at", DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get(); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); 
ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java index a328148180107..17b18bf9af1ee 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; @@ -54,9 +54,9 @@ public void testWaitIfOngoing() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(10); final CopyOnWriteArrayList errors = new CopyOnWriteArrayList<>(); for (int j = 0; j < 10; j++) { - indicesAdmin().prepareFlush("test").execute(new ActionListener() { + indicesAdmin().prepareFlush("test").execute(new ActionListener<>() { @Override - public void onResponse(FlushResponse flushResponse) { + public void onResponse(BroadcastResponse flushResponse) { try { // don't use assertAllSuccessful it uses a randomized context that belongs to a different thread assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index e5a8246ba6033..70cd143686dc8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.indices.mapping; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; @@ -82,7 +82,7 @@ public void testDynamicUpdates() throws Exception { indexRandom(true, false, indexRequests); logger.info("checking all the documents are there"); - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh().get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh().get(); assertThat(refreshResponse.getFailedShards(), equalTo(0)); assertHitCount(prepareSearch("test").setSize(0), recCount); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 2935efb4808a7..22f987cc855cc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -15,9 +15,9 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import 
org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; @@ -134,7 +134,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc } logger.info("Start Refresh"); // don't assert on failures here - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0; logger.info( "Refresh failed: [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 165b776f1ebc1..5bbedd8dc5870 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.indices.settings; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.cluster.ClusterState; @@ -378,10 +378,7 @@ public void testEngineGCDeletesSetting() throws Exception { prepareIndex("test").setId("1").setSource("f", 1).setVersionType(VersionType.EXTERNAL).setVersion(1).get(); client().prepareDelete("test", "1").setVersionType(VersionType.EXTERNAL).setVersion(2).get(); // delete is still in cache this 
should fail - ActionRequestBuilder builder = prepareIndex("test").setId("1") - .setSource("f", 3) - .setVersionType(VersionType.EXTERNAL) - .setVersion(1); + RequestBuilder builder = prepareIndex("test").setId("1").setSource("f", 3).setVersionType(VersionType.EXTERNAL).setVersion(1); expectThrows(VersionConflictEngineException.class, builder); assertAcked(indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.gc_deletes", 0))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java index a98297e8b49ae..e70c48ce8184e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; @@ -31,6 +30,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -1138,7 +1138,7 @@ public void testFilterCacheStats() throws Exception { }); flush("index"); logger.info("--> force merging to a single segment"); - ForceMergeResponse 
forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).setMaxNumSegments(1).get(); assertAllSuccessful(forceMergeResponse); logger.info("--> refreshing"); refresh(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java index fa4d4c0fbb669..2e515b07b59a0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java @@ -7,12 +7,15 @@ */ package org.elasticsearch.ingest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.plugins.IngestPlugin; @@ -26,18 +29,26 @@ import java.util.Map; import java.util.function.BiConsumer; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; /** * The purpose of this test is to verify that when a processor executes an operation asynchronously that * the expected result is the same as if the same operation happens synchronously. - * - * In this test two test processor are defined that basically do the same operation, but a single processor + *

    + * In this test two test processors are defined that basically do the same operation, but a single processor * executes asynchronously. The result of the operation should be the same and also the order in which the * bulk responses are returned should be the same as how the corresponding index requests were defined. + *

    + * As a further test, one document is dropped by the synchronous processor, and one document causes + * the asynchronous processor throw an exception. */ public class IngestAsyncProcessorIT extends ESSingleNodeTestCase { + private static final int DROPPED = 3; + + private static final int ERROR = 7; + @Override protected Collection> getPlugins() { return List.of(TestPlugin.class); @@ -58,11 +69,21 @@ public void testAsyncProcessorImplementation() { for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); assertThat(bulkResponse.getItems()[i].getId(), equalTo(id)); - GetResponse getResponse = client().get(new GetRequest("foobar", id)).actionGet(); - // The expected result of async test processor: - assertThat(getResponse.getSource().get("foo"), equalTo("bar-" + id)); - // The expected result of sync test processor: - assertThat(getResponse.getSource().get("bar"), equalTo("baz-" + id)); + if (i == DROPPED) { + UpdateResponse dropped = bulkResponse.getItems()[i].getResponse(); + assertThat(dropped.getId(), equalTo(id)); + assertThat(dropped.getResult(), equalTo(DocWriteResponse.Result.NOOP)); + } else if (i == ERROR) { + BulkItemResponse failure = bulkResponse.getItems()[i]; + assertThat(failure.getFailure().getId(), equalTo(id)); + assertThat(failure.getFailure().getMessage(), containsString("lucky number seven")); + } else { + GetResponse getResponse = client().get(new GetRequest("foobar", id)).actionGet(); + // The expected result of async test processor: + assertThat(getResponse.getSource().get("foo"), equalTo("bar-" + id)); + // The expected result of sync test processor: + assertThat(getResponse.getSource().get("bar"), equalTo("baz-" + id)); + } } } @@ -84,15 +105,20 @@ public Map getProcessors(Processor.Parameters paramet public void execute(IngestDocument ingestDocument, BiConsumer handler) { threadPool.generic().execute(() -> { String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); - if (usually()) { - try { - 
Thread.sleep(10); - } catch (InterruptedException e) { - // ignore + if (id.equals(String.valueOf(ERROR))) { + // lucky number seven always fails + handler.accept(ingestDocument, new RuntimeException("lucky number seven")); + } else { + if (usually()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + // ignore + } } + ingestDocument.setFieldValue("foo", "bar-" + id); + handler.accept(ingestDocument, null); } - ingestDocument.setFieldValue("foo", "bar-" + id); - handler.accept(ingestDocument, null); }); } @@ -110,8 +136,13 @@ public boolean isAsync() { @Override public IngestDocument execute(IngestDocument ingestDocument) throws Exception { String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); - ingestDocument.setFieldValue("bar", "baz-" + id); - return ingestDocument; + if (id.equals(String.valueOf(DROPPED))) { + // lucky number three is always dropped + return null; + } else { + ingestDocument.setFieldValue("bar", "baz-" + id); + return ingestDocument; + } } @Override diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index d47c68690bab8..782aafece4399 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -8,11 +8,11 @@ package org.elasticsearch.recovery; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; @@ -405,7 +405,7 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat private void refreshAndAssert() throws Exception { assertBusy(() -> { - RefreshResponse actionGet = indicesAdmin().prepareRefresh().get(); + BroadcastResponse actionGet = indicesAdmin().prepareRefresh().get(); assertAllSuccessful(actionGet); }, 5, TimeUnit.MINUTES); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java index bd69aebcd415e..baa721cbbabd2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java @@ -9,12 +9,11 @@ package org.elasticsearch.recovery; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentType; @@ -43,12 +42,12 @@ public void testSimpleRecovery() throws Exception { NumShards numShards = getNumShards("test"); client().index(new IndexRequest("test").id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); - FlushResponse flushResponse = indicesAdmin().flush(new FlushRequest("test")).actionGet(); + BroadcastResponse flushResponse = indicesAdmin().flush(new FlushRequest("test")).actionGet(); 
assertThat(flushResponse.getTotalShards(), equalTo(numShards.totalNumShards)); assertThat(flushResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(flushResponse.getFailedShards(), equalTo(0)); client().index(new IndexRequest("test").id("2").source(source("2", "test"), XContentType.JSON)).actionGet(); - RefreshResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); + BroadcastResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); assertThat(refreshResponse.getTotalShards(), equalTo(numShards.totalNumShards)); assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java new file mode 100644 index 0000000000000..e85a7354f930d --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.rest; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +public class RestControllerIT extends ESIntegTestCase { + @Override + protected boolean addMockHttpTransport() { + return false; // enable HTTP + } + + public void testHeadersEmittedWithChunkedResponses() throws IOException { + final var client = getRestClient(); + final var response = client.performRequest(new Request("GET", ChunkedResponseWithHeadersPlugin.ROUTE)); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertEquals(ChunkedResponseWithHeadersPlugin.HEADER_VALUE, response.getHeader(ChunkedResponseWithHeadersPlugin.HEADER_NAME)); + } + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), ChunkedResponseWithHeadersPlugin.class); + } + + public static class ChunkedResponseWithHeadersPlugin extends Plugin implements ActionPlugin { + + static final String ROUTE = "/_test/chunked_response_with_headers"; + static final String HEADER_NAME = "test-header"; + static final String HEADER_VALUE = "test-header-value"; + + @Override + public Collection getRestHandlers( + 
Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return ChunkedResponseWithHeadersPlugin.class.getCanonicalName(); + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, ROUTE)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + return channel -> { + final var response = RestResponse.chunked( + RestStatus.OK, + ChunkedRestResponseBody.fromXContent( + params -> Iterators.single((b, p) -> b.startObject().endObject()), + request, + channel + ), + null + ); + response.addHeader(HEADER_NAME, HEADER_VALUE); + channel.sendResponse(response); + }; + } + }); + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java index a856ee36aadc2..5144aee654b31 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java @@ -47,7 +47,7 @@ public void testScroll() { assertNoFailures(response); if (respNum == 1) { // initial response. 
- Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); Terms terms = aggregations.get("f"); assertEquals(Math.min(numDocs, 3L), terms.getBucketByKey("0").getDocCount()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index fc0a93ad3d290..5b8c238d7b7db 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -66,7 +66,7 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t prepareSearch("idx").addAggregation(missing("missing_values").field("value")) .addAggregation(terms("values").field("value").collectMode(aggCollectionMode)), response -> { - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); Missing missing = aggs.get("missing_values"); assertNotNull(missing); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java index f22e0a2931634..3634005d37ba4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java @@ -43,7 +43,7 @@ public void testMetadataSetOnAggregationResult() throws Exception { terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value")) ).addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata)), response -> { - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); 
assertNotNull(aggs); Terms terms = aggs.get("the_terms"); @@ -52,7 +52,7 @@ public void testMetadataSetOnAggregationResult() throws Exception { List buckets = terms.getBuckets(); for (Terms.Bucket bucket : buckets) { - Aggregations subAggs = bucket.getAggregations(); + InternalAggregations subAggs = bucket.getAggregations(); assertNotNull(subAggs); Sum sum = subAggs.get("the_sum"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index da1376a300728..21a607f113f14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -19,9 +19,9 @@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory; @@ -136,7 +136,7 @@ public void testXContentResponse() throws Exception { StringTerms classes = response.getAggregations().get("class"); assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); + Map aggs = 
classBucket.getAggregations().asMap(); assertTrue(aggs.containsKey("sig_terms")); SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); assertThat(agg.getBuckets().size(), equalTo(1)); @@ -331,7 +331,7 @@ public void testBackgroundVsSeparateSet( double score10Background = sigTerms1.getBucketByKey("0").getSignificanceScore(); double score11Background = sigTerms1.getBucketByKey("1").getSignificanceScore(); - Aggregations aggs = response2.getAggregations(); + InternalAggregations aggs = response2.getAggregations(); sigTerms0 = (SignificantTerms) ((InternalFilter) aggs.get("0")).getAggregations().getAsMap().get("sig_terms"); double score00SeparateSets = sigTerms0.getBucketByKey("0").getSignificanceScore(); @@ -386,7 +386,7 @@ public void testScoresEqualForPositiveAndNegative(SignificanceHeuristic heuristi assertThat(classes.getBuckets().size(), equalTo(2)); Iterator classBuckets = classes.getBuckets().iterator(); - Aggregations aggregations = classBuckets.next().getAggregations(); + InternalAggregations aggregations = classBuckets.next().getAggregations(); SignificantTerms sigTerms = aggregations.get("mySignificantTerms"); List classA = sigTerms.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index d40264d9facf0..02c45c4aade1b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -19,8 +19,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; +import 
org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; @@ -1037,7 +1037,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { for (Bucket b : buckets) { assertThat(b, notNullValue()); assertThat(b.getDocCount(), equalTo(1L)); - Aggregations subAggs = b.getAggregations(); + InternalAggregations subAggs = b.getAggregations(); assertThat(subAggs, notNullValue()); assertThat(subAggs.asList().size(), equalTo(1)); Aggregation subAgg = subAggs.get("scripted"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java index 7509cf3815085..3c9fbca476c0d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java @@ -19,7 +19,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -44,7 +44,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @ESIntegTestCase.SuiteScopeTestCase -abstract class 
BucketMetricsPipeLineAggregationTestCase extends ESIntegTestCase { +abstract class BucketMetricsPipeLineAggregationTestCase extends ESIntegTestCase { static final String SINGLE_VALUED_FIELD_NAME = "l_value"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 6562c485b9204..421a5d2d36254 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsNull.notNullValue; -public class ExtendedStatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { +public class ExtendedStatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { @Override protected ExtendedStatsBucketPipelineAggregationBuilder BucketMetricsPipelineAgg(String name, String bucketsPath) { @@ -43,7 +43,7 @@ protected void assertResult( IntToDoubleFunction buckets, Function bucketKeys, int numBuckets, - ExtendedStatsBucket pipelineBucket + InternalExtendedStatsBucket pipelineBucket ) { double sum = 0; int count = 0; @@ -71,7 +71,7 @@ protected String nestedMetric() { } @Override - protected double getNestedMetric(ExtendedStatsBucket bucket) { + protected double getNestedMetric(InternalExtendedStatsBucket bucket) { return bucket.getAvg(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index c05390bac40ae..b4193b8f90e1f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.core.IsNull.notNullValue; -public class PercentilesBucketIT extends BucketMetricsPipeLineAggregationTestCase { +public class PercentilesBucketIT extends BucketMetricsPipeLineAggregationTestCase { private static final double[] PERCENTS = { 0.0, 1.0, 25.0, 50.0, 75.0, 99.0, 100.0 }; @@ -46,7 +46,7 @@ protected void assertResult( IntToDoubleFunction bucketValues, Function bucketKeys, int numBuckets, - PercentilesBucket pipelineBucket + InternalPercentilesBucket pipelineBucket ) { double[] values = new double[numBuckets]; for (int i = 0; i < numBuckets; ++i) { @@ -62,7 +62,7 @@ protected String nestedMetric() { } @Override - protected double getNestedMetric(PercentilesBucket bucket) { + protected double getNestedMetric(InternalPercentilesBucket bucket) { return bucket.percentile(50); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java index 7040f3bf115f3..cd87bd98a0926 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java @@ -14,7 +14,7 @@ import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.statsBucket; import static org.hamcrest.Matchers.equalTo; -public class StatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { +public class StatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { @Override protected StatsBucketPipelineAggregationBuilder BucketMetricsPipelineAgg(String name, String bucketsPath) { @@ -26,7 +26,7 @@ protected void assertResult( IntToDoubleFunction bucketValues, Function 
bucketKeys, int numBuckets, - StatsBucket pipelineBucket + InternalStatsBucket pipelineBucket ) { double sum = 0; int count = 0; @@ -52,7 +52,7 @@ protected String nestedMetric() { } @Override - protected double getNestedMetric(StatsBucket bucket) { + protected double getNestedMetric(InternalStatsBucket bucket) { return bucket.getAvg(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 97a400709cde7..68d00321848eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.search.basic; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.index.query.QueryBuilders; @@ -55,7 +55,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) createIndex("test"); } prepareIndex("test").setId(id).setSource("field", "test").get(); - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); // at least one shard should be successful when refreshing assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index 6ebfc61830269..6985ebb17386c 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -14,8 +14,8 @@ import org.apache.lucene.tests.util.English; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -111,7 +111,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe } logger.info("Start Refresh"); // don't assert on failures here - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0; logger.info( "Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index 33ef75b317e33..07d976437c24c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -13,8 +13,8 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import 
org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -135,7 +135,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc ESIntegTestCase.NumShards numShards = getNumShards("test"); logger.info("Start Refresh"); // don't assert on failures here - final RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); + final BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0; logger.info( "Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index c4b0346170949..303030a523662 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -12,10 +12,10 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; import 
org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; @@ -50,7 +50,7 @@ public void testFailedSearchWithWrongQuery() throws Exception { for (int i = 0; i < 100; i++) { index(client(), Integer.toString(i), "test", i); } - RefreshResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); + BroadcastResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); assertThat(refreshResponse.getTotalShards(), equalTo(test.totalNumShards)); assertThat(refreshResponse.getSuccessfulShards(), equalTo(test.numPrimaries)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java index cf8d81f406f91..eedda05dcb102 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -268,7 +268,7 @@ public void testCancel() throws Exception { final CancelTasksRequest cancelRequest = new CancelTasksRequest().setTargetTaskId(rootTask.taskId()); cancelRequest.setWaitForCompletion(randomBoolean()); - 
final ActionFuture cancelFuture = client().admin().cluster().cancelTasks(cancelRequest); + final ActionFuture cancelFuture = client().admin().cluster().cancelTasks(cancelRequest); assertBusy(() -> { final Iterable transportServices = cluster("cluster_a").getInstances(TransportService.class); for (TransportService transportService : transportServices) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 8f178397f508b..1fe128da6889c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -112,7 +112,7 @@ public void testConsistentHitsWithSameSeed() throws Exception { CoreMatchers.equalTo(0) ); final int hitCount = response.getHits().getHits().length; - final SearchHit[] currentHits = response.getHits().getHits(); + final SearchHit[] currentHits = response.getHits().asUnpooled().getHits(); ArrayUtil.timSort(currentHits, (o1, o2) -> { // for tie-breaking we have to resort here since if the score is // identical we rely on collection order which might change. 
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java index 37c78ec568332..31524765d4e14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -22,7 +22,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.range.InternalGeoDistance; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.test.ESIntegTestCase; @@ -216,7 +216,7 @@ public void testGeoDistanceAggregation() throws IOException { .addRange(0, 25000) ), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalGeoDistance geoDistance = aggregations.get(name); assertNotNull(geoDistance); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java index 3dd9e68cf08af..f830ca9ac0cb6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.search.vectors.KnnSearchBuilder; import 
org.elasticsearch.test.ESIntegTestCase; @@ -66,8 +67,9 @@ public void testSimpleNested() throws Exception { refresh(); assertResponse( - prepareSearch("test").setKnnSearch(List.of(new KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null))) - .setAllowPartialSearchResults(false), + prepareSearch("test").setKnnSearch( + List.of(new KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null).innerHit(new InnerHitBuilder())) + ).setAllowPartialSearchResults(false), response -> assertThat(response.getHits().getHits().length, greaterThan(0)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java index c6d3a6733d2fc..65393f4185ce8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java @@ -39,6 +39,7 @@ public class DfsProfilerIT extends ESIntegTestCase { private static final int KNN_DIM = 3; + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104235") public void testProfileDfs() throws Exception { String textField = "text_field"; String numericField = "number"; @@ -65,25 +66,23 @@ public void testProfileDfs() throws Exception { int iters = between(5, 10); for (int i = 0; i < iters; i++) { QueryBuilder q = randomQueryBuilder(List.of(textField), List.of(numericField), numDocs, 3); + KnnSearchBuilder knnSearchBuilder = new KnnSearchBuilder( + vectorField, + new float[] { randomFloat(), randomFloat(), randomFloat() }, + randomIntBetween(5, 10), + 50, + randomBoolean() ? 
null : randomFloat() + ); + if (randomBoolean()) { + knnSearchBuilder.addFilterQuery(q); + } logger.info("Query: {}", q); assertResponse( prepareSearch().setQuery(q) .setTrackTotalHits(true) .setProfile(true) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setKnnSearch( - randomList( - 2, - 5, - () -> new KnnSearchBuilder( - vectorField, - new float[] { randomFloat(), randomFloat(), randomFloat() }, - randomIntBetween(5, 10), - 50, - randomBoolean() ? null : randomFloat() - ) - ) - ), + .setKnnSearch(randomList(2, 5, () -> knnSearchBuilder)), response -> { assertNotNull("Profile response element should not be null", response.getProfileResults()); assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 81659323e2471..20c5c11f36756 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -12,13 +12,13 @@ import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; 
import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.FieldMemoryStats; import org.elasticsearch.common.settings.Settings; @@ -1267,7 +1267,7 @@ public void testPrunedSegments() throws IOException { .get(); // we have 2 docs in a segment... prepareIndex(INDEX).setId("2").setSource(jsonBuilder().startObject().field("somefield", "somevalue").endObject()).get(); - ForceMergeResponse actionGet = indicesAdmin().prepareForceMerge().setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse actionGet = indicesAdmin().prepareForceMerge().setFlush(true).setMaxNumSegments(1).get(); assertAllSuccessful(actionGet); refresh(); // update the first one and then merge.. the target segment will have no value in FIELD diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java index b126e4e51128f..df4d52727384f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -20,6 +19,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import 
org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; @@ -159,7 +159,7 @@ public void testForceMergeCausesFullSnapshot() throws Exception { clusterAdmin().prepareCreateSnapshot(repo, snapshot1).setIndices(indexName).setWaitForCompletion(true).get(); logger.info("--> force merging down to a single segment"); - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).setFlush(true).get(); + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).setFlush(true).get(); assertThat(forceMergeResponse.getFailedShards(), is(0)); final String snapshot2 = "snap-2"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 8d2e15f5027d5..1152cf5f03e5a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -17,17 +17,24 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexClusterStateUpdateRequest; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.SnapshotDeletionsInProgress; import 
org.elasticsearch.cluster.SnapshotsInProgress; +import org.elasticsearch.cluster.metadata.MetadataDeleteIndexService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.discovery.AbstractDisruptionTestCase; +import org.elasticsearch.index.Index; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryConflictException; @@ -36,6 +43,7 @@ import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.transport.MockTransportService; @@ -48,10 +56,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; @@ -2060,6 +2070,106 @@ public void testQueuedSnapshotAfterPartialWithIndexRecreate() throws Exception { assertSuccessful(partialFuture); } + public void testDeleteIndexWithOutOfOrderFinalization() { + + final var indexToDelete = "index-to-delete"; + final var indexNames = List.of(indexToDelete, "index-0", "index-1", "index-2"); + + for (final var 
indexName : indexNames) { + assertAcked(prepareCreate(indexName, indexSettingsNoReplicas(1))); + } + + final var repoName = "test-repo"; + createRepository(repoName, "fs"); + + // block the update-shard-snapshot-status requests so we can execute them in a specific order + final var masterTransportService = MockTransportService.getInstance(internalCluster().getMasterName()); + final Map> otherIndexSnapshotListeners = indexNames.stream() + .collect(Collectors.toMap(k -> k, k -> new SubscribableListener<>())); + masterTransportService.addRequestHandlingBehavior( + SnapshotsService.UPDATE_SNAPSHOT_STATUS_ACTION_NAME, + (handler, request, channel, task) -> { + final var indexName = request.shardId().getIndexName(); + if (indexName.equals(indexToDelete)) { + handler.messageReceived(request, channel, task); + } else { + final var listener = otherIndexSnapshotListeners.get(indexName); + assertNotNull(indexName, listener); + listener.addListener( + ActionTestUtils.assertNoFailureListener(ignored -> handler.messageReceived(request, channel, task)) + ); + } + } + ); + + // start the snapshots, each targeting index-to-delete and one other index so we can control their finalization order + final var snapshotCompleters = new HashMap(); + for (final var blockingIndex : List.of("index-0", "index-1", "index-2")) { + final var snapshotName = "snapshot-with-" + blockingIndex; + final var snapshotFuture = clusterAdmin().prepareCreateSnapshot(repoName, snapshotName) + .setWaitForCompletion(true) + .setPartial(true) + .setIndices(indexToDelete, blockingIndex) + .execute(); + + // ensure each snapshot has really started before moving on to the next one + safeAwait( + ClusterServiceUtils.addTemporaryStateListener( + internalCluster().getInstance(ClusterService.class), + cs -> SnapshotsInProgress.get(cs) + .forRepo(repoName) + .stream() + .anyMatch(e -> e.snapshot().getSnapshotId().getName().equals(snapshotName)) + ) + ); + + snapshotCompleters.put(blockingIndex, () -> { + 
assertFalse(snapshotFuture.isDone()); + otherIndexSnapshotListeners.get(blockingIndex).onResponse(null); + assertEquals(SnapshotState.SUCCESS, snapshotFuture.actionGet(10, TimeUnit.SECONDS).getSnapshotInfo().state()); + }); + } + + // set up to delete the index at a very specific moment during finalization + final var masterDeleteIndexService = internalCluster().getCurrentMasterNodeInstance(MetadataDeleteIndexService.class); + final var indexRecreatedListener = ClusterServiceUtils + // wait until the snapshot has entered finalization + .addTemporaryStateListener( + internalCluster().getInstance(ClusterService.class), + cs -> SnapshotsInProgress.get(cs) + .forRepo(repoName) + .stream() + .anyMatch(e -> e.snapshot().getSnapshotId().getName().equals("snapshot-with-index-1") && e.state().completed()) + ) + // execute the index deletion _directly on the master_ so it happens before the snapshot finalization executes + .andThen((l, ignored) -> masterDeleteIndexService.deleteIndices(new DeleteIndexClusterStateUpdateRequest(l.map(r -> { + assertTrue(r.isAcknowledged()); + return null; + })).indices(new Index[] { internalCluster().clusterService().state().metadata().index(indexToDelete).getIndex() }) + .ackTimeout(TimeValue.timeValueSeconds(10)) + .masterNodeTimeout(TimeValue.timeValueSeconds(10)))) + // ultimately create the index again so that taking a full snapshot will pick up any missing shard gen blob, and deleting that + // full snapshot will clean up all dangling shard-level blobs + .andThen((l, ignored) -> prepareCreate(indexToDelete, indexSettingsNoReplicas(1)).execute(l.map(r -> { + assertTrue(r.isAcknowledged()); + return null; + }))); + + // release the snapshots to be finalized, in this order + for (final var blockingIndex : List.of("index-1", "index-2", "index-0")) { + snapshotCompleters.get(blockingIndex).run(); + } + + safeAwait(indexRecreatedListener); + masterTransportService.clearAllRules(); + + // create a full snapshot to verify that the repo is still 
ok + createFullSnapshot(repoName, "final-full-snapshot"); + + // delete the full snapshot to clean up the leftover shard-level metadata (which trips repo consistency assertions otherwise) + startDeleteSnapshot(repoName, "final-full-snapshot").actionGet(10, TimeUnit.SECONDS); + } + private static void assertSnapshotStatusCountOnRepo(String otherBlockedRepoName, int count) { final SnapshotsStatusResponse snapshotsStatusResponse = clusterAdmin().prepareSnapshotStatus(otherBlockedRepoName).get(); final List snapshotStatuses = snapshotsStatusResponse.getSnapshots(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index ed070c3224aa2..c13891728f315 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -21,11 +21,11 @@ import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.RestoreInProgress; @@ -119,7 +119,7 @@ public void testBasicWorkFlow() throws Exception { createIndexWithRandomDocs("test-idx-2", 100); createIndexWithRandomDocs("test-idx-3", 
100); - ActionFuture flushResponseFuture = null; + ActionFuture flushResponseFuture = null; if (randomBoolean()) { ArrayList indicesToFlush = new ArrayList<>(); for (int i = 1; i < 4; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index 841f77ea7efab..422aa757656ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -11,6 +11,11 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.telemetry.InstrumentType; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -19,12 +24,18 @@ import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; +import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; +import java.util.List; +import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.in; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class SimpleThreadPoolIT extends ESIntegTestCase { @@ -33,6 +44,11 @@ protected Settings nodeSettings(int nodeOrdinal, 
Settings otherSettings) { return Settings.builder().build(); } + @Override + protected Collection> nodePlugins() { + return List.of(TestTelemetryPlugin.class); + } + public void testThreadNames() throws Exception { ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); Set preNodeStartThreadNames = new HashSet<>(); @@ -95,4 +111,67 @@ public void testThreadNames() throws Exception { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104652") + public void testThreadPoolMetrics() throws Exception { + internalCluster().startNode(); + + final String dataNodeName = internalCluster().getRandomNodeName(); + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + + logger.info("do some indexing, flushing, optimize, and searches"); + int numDocs = randomIntBetween(2, 100); + IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; ++i) { + builders[i] = prepareIndex("idx").setSource( + jsonBuilder().startObject() + .field("str_value", "s" + i) + .array("str_values", new String[] { "s" + (i * 2), "s" + (i * 2 + 1) }) + .field("l_value", i) + .array("l_values", new int[] { i * 2, i * 2 + 1 }) + .field("d_value", i) + .array("d_values", new double[] { i * 2, i * 2 + 1 }) + .endObject() + ); + } + indexRandom(true, builders); + int numSearches = randomIntBetween(2, 100); + for (int i = 0; i < numSearches; i++) { + assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("str_value", "s" + i))); + assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("l_value", i))); + } + final var tp = internalCluster().getInstance(ThreadPool.class, dataNodeName); + ThreadPoolStats tps = tp.stats(); + plugin.collect(); + ArrayList registeredMetrics = plugin.getRegisteredMetrics(InstrumentType.LONG_GAUGE); + 
registeredMetrics.addAll(plugin.getRegisteredMetrics(InstrumentType.LONG_ASYNC_COUNTER)); + tps.forEach(stats -> { + Map threadPoolMetrics = Map.of( + ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, + stats.completed(), + ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, + (long) stats.active(), + ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, + (long) stats.threads(), + ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, + (long) stats.largest(), + ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, + (long) stats.queue() + ); + threadPoolMetrics.forEach((suffix, value) -> { + String metricName = ThreadPool.THREAD_POOL_METRIC_PREFIX + stats.name() + suffix; + List measurements; + if (suffix.equals(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED)) { + measurements = plugin.getLongAsyncCounterMeasurement(metricName); + } else { + measurements = plugin.getLongGaugeMeasurement(metricName); + } + assertThat(metricName, in(registeredMetrics)); + assertThat(measurements.get(0).getLong(), greaterThanOrEqualTo(value)); + }); + }); + } + } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java index 819e14c176975..41fc3d2b759ff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -8,10 +8,10 @@ package org.elasticsearch.versioning; import org.apache.lucene.tests.util.TestUtil; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; @@ -88,7 +88,7 @@ public void testExternalGTE() 
throws Exception { .get(); assertThat(indexResponse.getVersion(), equalTo(14L)); - ActionRequestBuilder builder1 = prepareIndex("test").setId("1") + RequestBuilder builder1 = prepareIndex("test").setId("1") .setSource("field1", "value1_1") .setVersion(13) .setVersionType(VersionType.EXTERNAL_GTE); @@ -103,7 +103,7 @@ public void testExternalGTE() throws Exception { } // deleting with a lower version fails. - ActionRequestBuilder builder = client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE); + RequestBuilder builder = client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE); expectThrows(VersionConflictEngineException.class, builder); // Delete with a higher or equal version deletes all versions up to the given one. @@ -259,11 +259,11 @@ public void testCompareAndSet() { VersionConflictEngineException.class ); - ActionRequestBuilder builder6 = client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(1); + RequestBuilder builder6 = client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(1); expectThrows(VersionConflictEngineException.class, builder6); - ActionRequestBuilder builder5 = client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(2); + RequestBuilder builder5 = client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(2); expectThrows(VersionConflictEngineException.class, builder5); - ActionRequestBuilder builder4 = client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2); + RequestBuilder builder4 = client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2); expectThrows(VersionConflictEngineException.class, builder4); client().admin().indices().prepareRefresh().get(); @@ -295,15 +295,15 @@ public void testCompareAndSet() { assertThat(deleteResponse.getSeqNo(), equalTo(2L)); assertThat(deleteResponse.getPrimaryTerm(), equalTo(1L)); - ActionRequestBuilder builder3 = client().prepareDelete("test", 
"1").setIfSeqNo(1).setIfPrimaryTerm(1); + RequestBuilder builder3 = client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(1); expectThrows(VersionConflictEngineException.class, builder3); - ActionRequestBuilder builder2 = client().prepareDelete("test", "1").setIfSeqNo(3).setIfPrimaryTerm(12); + RequestBuilder builder2 = client().prepareDelete("test", "1").setIfSeqNo(3).setIfPrimaryTerm(12); expectThrows(VersionConflictEngineException.class, builder2); - ActionRequestBuilder builder1 = client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2); + RequestBuilder builder1 = client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2); expectThrows(VersionConflictEngineException.class, builder1); // the doc is deleted. Even when we hit the deleted seqNo, a conditional delete should fail. - ActionRequestBuilder builder = client().prepareDelete("test", "1").setIfSeqNo(2).setIfPrimaryTerm(1); + RequestBuilder builder = client().prepareDelete("test", "1").setIfSeqNo(2).setIfPrimaryTerm(1); expectThrows(VersionConflictEngineException.class, builder); } @@ -319,16 +319,13 @@ public void testSimpleVersioningWithFlush() throws Exception { assertThat(indexResponse.getSeqNo(), equalTo(1L)); client().admin().indices().prepareFlush().get(); - ActionRequestBuilder builder2 = prepareIndex("test").setId("1") - .setSource("field1", "value1_1") - .setIfSeqNo(0) - .setIfPrimaryTerm(1); + RequestBuilder builder2 = prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(0).setIfPrimaryTerm(1); expectThrows(VersionConflictEngineException.class, builder2); - ActionRequestBuilder builder1 = prepareIndex("test").setId("1").setCreate(true).setSource("field1", "value1_1"); + RequestBuilder builder1 = prepareIndex("test").setId("1").setCreate(true).setSource("field1", "value1_1"); expectThrows(VersionConflictEngineException.class, builder1); - ActionRequestBuilder builder = client().prepareDelete("test", "1").setIfSeqNo(0).setIfPrimaryTerm(1); 
+ RequestBuilder builder = client().prepareDelete("test", "1").setIfSeqNo(0).setIfPrimaryTerm(1); expectThrows(VersionConflictEngineException.class, builder); for (int i = 0; i < 10; i++) { diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index e72cb6c53e8e5..eddc96764273c 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -333,6 +333,7 @@ exports org.elasticsearch.search.aggregations.bucket.nested; exports org.elasticsearch.search.aggregations.bucket.range; exports org.elasticsearch.search.aggregations.bucket.sampler; + exports org.elasticsearch.search.aggregations.bucket.sampler.random; exports org.elasticsearch.search.aggregations.bucket.terms; exports org.elasticsearch.search.aggregations.bucket.terms.heuristic; exports org.elasticsearch.search.aggregations.metrics; diff --git a/server/src/main/java/org/elasticsearch/Build.java b/server/src/main/java/org/elasticsearch/Build.java index 0b8cd149744e3..89082389c5805 100644 --- a/server/src/main/java/org/elasticsearch/Build.java +++ b/server/src/main/java/org/elasticsearch/Build.java @@ -204,8 +204,7 @@ static URL getElasticsearchCodeSourceLocation() { public static Build readBuild(StreamInput in) throws IOException { final String flavor; - if (in.getTransportVersion().before(TransportVersions.V_8_3_0) - || in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().before(TransportVersions.V_8_3_0) || in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { flavor = in.readString(); } else { flavor = "default"; @@ -235,7 +234,7 @@ public static Build readBuild(StreamInput in) throws IOException { version = versionMatcher.group(1); qualifier = versionMatcher.group(2); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { minWireVersion = in.readString(); minIndexVersion = 
in.readString(); displayString = in.readString(); @@ -252,7 +251,7 @@ public static Build readBuild(StreamInput in) throws IOException { public static void writeBuild(Build build, StreamOutput out) throws IOException { if (out.getTransportVersion().before(TransportVersions.V_8_3_0) - || out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + || out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeString(build.flavor()); } out.writeString(build.type().displayName()); @@ -266,7 +265,7 @@ public static void writeBuild(Build build, StreamOutput out) throws IOException out.writeBoolean(build.isSnapshot()); out.writeString(build.qualifiedVersion()); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeString(build.minWireCompatVersion()); out.writeString(build.minIndexCompatVersion()); out.writeString(build.displayString()); diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 50a5f7420847b..4f29fb3a168b3 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -342,12 +342,20 @@ public static int getId(Class exception) { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return toXContent(builder, params, 0); + } + + /** + * Equivalent to {@link org.elasticsearch.xcontent.ToXContent#toXContent(XContentBuilder, Params)} except that it limits nesting depth + * so that it can avoid stackoverflow errors. 
+ */ + protected XContentBuilder toXContent(XContentBuilder builder, Params params, int nestedLevel) throws IOException { Throwable ex = ExceptionsHelper.unwrapCause(this); if (ex != this) { - generateThrowableXContent(builder, params, this); + generateThrowableXContent(builder, params, this, nestedLevel); } else { - innerToXContent(builder, params, this, getExceptionName(), getMessage(), headers, metadata, getCause()); + innerToXContent(builder, params, this, getExceptionName(), getMessage(), headers, metadata, getCause(), nestedLevel); } return builder; } @@ -360,8 +368,17 @@ protected static void innerToXContent( String message, Map> headers, Map> metadata, - Throwable cause + Throwable cause, + int nestedLevel ) throws IOException { + + if (nestedLevel > MAX_NESTED_EXCEPTION_LEVEL) { + var terminalException = new IllegalStateException("too many nested exceptions"); + builder.field(TYPE, getExceptionName(terminalException)); + builder.field(REASON, terminalException.getMessage()); + return; + } + builder.field(TYPE, type); builder.field(REASON, message); @@ -377,7 +394,7 @@ protected static void innerToXContent( if (cause != null) { builder.field(CAUSED_BY); builder.startObject(); - generateThrowableXContent(builder, params, cause); + generateThrowableXContent(builder, params, cause, nestedLevel + 1); builder.endObject(); } } @@ -399,7 +416,7 @@ protected static void innerToXContent( builder.startArray(SUPPRESSED.getPreferredName()); for (Throwable suppressed : allSuppressed) { builder.startObject(); - generateThrowableXContent(builder, params, suppressed); + generateThrowableXContent(builder, params, suppressed, nestedLevel + 1); builder.endObject(); } builder.endArray(); @@ -552,18 +569,27 @@ public static ElasticsearchException innerFromXContent(XContentParser parser, bo /** * Static toXContent helper method that renders {@link org.elasticsearch.ElasticsearchException} or {@link Throwable} instances * as XContent, delegating the rendering to {@link 
#toXContent(XContentBuilder, Params)} - * or {@link #innerToXContent(XContentBuilder, Params, Throwable, String, String, Map, Map, Throwable)}. + * or {@link #innerToXContent(XContentBuilder, Params, Throwable, String, String, Map, Map, Throwable, int)}. * * This method is usually used when the {@link Throwable} is rendered as a part of another XContent object, and its result can * be parsed back using the {@link #fromXContent(XContentParser)} method. */ public static void generateThrowableXContent(XContentBuilder builder, Params params, Throwable t) throws IOException { + generateThrowableXContent(builder, params, t, 0); + } + + /** + * Equivalent to {@link #generateThrowableXContent(XContentBuilder, Params, Throwable)} but limits nesting depth + * so that it can avoid stackoverflow errors. + */ + protected static void generateThrowableXContent(XContentBuilder builder, Params params, Throwable t, int nestedLevel) + throws IOException { t = ExceptionsHelper.unwrapCause(t); if (t instanceof ElasticsearchException) { - ((ElasticsearchException) t).toXContent(builder, params); + ((ElasticsearchException) t).toXContent(builder, params, nestedLevel); } else { - innerToXContent(builder, params, t, getExceptionName(t), t.getMessage(), emptyMap(), emptyMap(), t.getCause()); + innerToXContent(builder, params, t, getExceptionName(t), t.getMessage(), emptyMap(), emptyMap(), t.getCause(), nestedLevel); } } @@ -1838,20 +1864,20 @@ private enum ElasticsearchExceptionHandle { org.elasticsearch.http.HttpHeadersValidationException.class, org.elasticsearch.http.HttpHeadersValidationException::new, 169, - TransportVersions.V_8_500_020 + TransportVersions.V_8_9_X ), ROLE_RESTRICTION_EXCEPTION( ElasticsearchRoleRestrictionException.class, ElasticsearchRoleRestrictionException::new, 170, - TransportVersions.V_8_500_020 + TransportVersions.V_8_9_X ), - API_NOT_AVAILABLE_EXCEPTION(ApiNotAvailableException.class, ApiNotAvailableException::new, 171, TransportVersions.V_8_500_065), + 
API_NOT_AVAILABLE_EXCEPTION(ApiNotAvailableException.class, ApiNotAvailableException::new, 171, TransportVersions.V_8_11_X), RECOVERY_COMMIT_TOO_NEW_EXCEPTION( RecoveryCommitTooNewException.class, RecoveryCommitTooNewException::new, 172, - TransportVersions.RECOVERY_COMMIT_TOO_NEW_EXCEPTION_ADDED + TransportVersions.V_8_11_X ), TOO_MANY_SCROLL_CONTEXTS_NEW_EXCEPTION( TooManyScrollContextsException.class, diff --git a/server/src/main/java/org/elasticsearch/ReleaseVersions.java b/server/src/main/java/org/elasticsearch/ReleaseVersions.java new file mode 100644 index 0000000000000..440603cf10ae5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/ReleaseVersions.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.internal.BuildExtension; +import org.elasticsearch.plugins.ExtensionLoader; + +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.NavigableMap; +import java.util.ServiceLoader; +import java.util.TreeMap; +import java.util.function.IntFunction; +import java.util.regex.Pattern; + +public class ReleaseVersions { + + private static final boolean USES_VERSIONS; + + static { + USES_VERSIONS = ExtensionLoader.loadSingleton(ServiceLoader.load(BuildExtension.class)) + .map(BuildExtension::hasReleaseVersioning) + .orElse(true); + } + + private static final Pattern VERSION_LINE = Pattern.compile("(\\d+\\.\\d+\\.\\d+),(\\d+)"); + + public static IntFunction generateVersionsLookup(Class versionContainer) { + if (USES_VERSIONS == false) return Integer::toString; + + try { + String versionsFileName = versionContainer.getSimpleName() + ".csv"; + InputStream versionsFile = versionContainer.getResourceAsStream(versionsFileName); + if (versionsFile == null) { + throw new FileNotFoundException(Strings.format("Could not find versions file for class [%s]", versionContainer)); + } + + NavigableMap> versions = new TreeMap<>(); + try (BufferedReader reader = new BufferedReader(new InputStreamReader(versionsFile, StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { + var matcher = VERSION_LINE.matcher(line); + if (matcher.matches() == false) { + throw new IOException(Strings.format("Incorrect format for line [%s] in [%s]", line, versionsFileName)); + } + try { + Integer id = Integer.valueOf(matcher.group(2)); + Version version = 
Version.fromString(matcher.group(1)); + versions.computeIfAbsent(id, k -> new ArrayList<>()).add(version); + } catch (IllegalArgumentException e) { + // cannot happen??? regex is wrong... + assert false : "Regex allowed non-integer id or incorrect version through: " + e; + throw new IOException(Strings.format("Incorrect format for line [%s] in [%s]", line, versionsFileName), e); + } + } + } + + // replace all version lists with the smallest & greatest versions + versions.replaceAll((k, v) -> { + if (v.size() == 1) { + return List.of(v.get(0)); + } else { + v.sort(Comparator.naturalOrder()); + return List.of(v.get(0), v.get(v.size() - 1)); + } + }); + + return lookupFunction(versions); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private static IntFunction lookupFunction(NavigableMap> versions) { + assert versions.values().stream().allMatch(vs -> vs.size() == 1 || vs.size() == 2) + : "Version ranges have not been properly processed: " + versions; + + return id -> { + List versionRange = versions.get(id); + + String lowerBound, upperBound; + if (versionRange != null) { + lowerBound = versionRange.get(0).toString(); + upperBound = lastItem(versionRange).toString(); + } else { + // infer the bounds from the surrounding entries + var lowerRange = versions.lowerEntry(id); + if (lowerRange != null) { + // the next version is just a guess - might be a newer revision, might be a newer minor or major... + lowerBound = nextVersion(lastItem(lowerRange.getValue())).toString(); + } else { + // we know about all preceding versions - how can this version be less than everything else we know about??? 
+ assert false : "Could not find preceding version for id " + id; + lowerBound = "snapshot[" + id + "]"; + } + + var upperRange = versions.higherEntry(id); + if (upperRange != null) { + // too hard to guess what version this id might be for using the next version - just use it directly + upperBound = upperRange.getValue().get(0).toString(); + } else { + // likely a version created after the last release tagged version - ok + upperBound = "snapshot[" + id + "]"; + } + } + + return lowerBound.equals(upperBound) ? lowerBound : lowerBound + "-" + upperBound; + }; + } + + private static T lastItem(List list) { + return list.get(list.size() - 1); + } + + private static Version nextVersion(Version version) { + return new Version(version.id + 100); // +1 to revision + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index d3224bb048393..22e02652e9f68 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -101,6 +101,14 @@ public static TransportVersion fromString(String str) { return TransportVersion.fromId(Integer.parseInt(str)); } + /** + * Returns a string representing the Elasticsearch release version of this transport version, + * if applicable for this deployment, otherwise the raw version number. 
+ */ + public String toReleaseVersion() { + return TransportVersions.VERSION_LOOKUP.apply(id); + } + @Override public String toString() { return Integer.toString(id); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index f289a7a3c89a1..8fb48ea39b3c8 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -20,6 +20,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; +import java.util.function.IntFunction; /** *

    Transport version is used to coordinate compatible wire protocol communication between nodes, at a fine-grained level. This replaces @@ -92,36 +93,9 @@ static TransportVersion def(int id) { * READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW TRANSPORT VERSIONS * Detached transport versions added below here. */ - public static final TransportVersion V_8_500_020 = def(8_500_020); - public static final TransportVersion V_8_500_061 = def(8_500_061); - public static final TransportVersion V_8_500_062 = def(8_500_062); - public static final TransportVersion V_8_500_063 = def(8_500_063); - public static final TransportVersion V_8_500_064 = def(8_500_064); - public static final TransportVersion V_8_500_065 = def(8_500_065); - public static final TransportVersion V_8_500_066 = def(8_500_066); - public static final TransportVersion SEARCH_RESP_SKIP_UNAVAILABLE_ADDED = def(8_500_067); - public static final TransportVersion ML_TRAINED_MODEL_FINISH_PENDING_WORK_ADDED = def(8_500_068); - public static final TransportVersion SEARCH_APP_INDICES_REMOVED = def(8_500_069); - public static final TransportVersion GENERIC_NAMED_WRITABLE_ADDED = def(8_500_070); - public static final TransportVersion PINNED_QUERY_OPTIONAL_INDEX = def(8_500_071); - public static final TransportVersion SHARD_SIZE_PRIMARY_TERM_GEN_ADDED = def(8_500_072); - public static final TransportVersion COMPAT_VERSIONS_MAPPING_VERSION_ADDED = def(8_500_073); - public static final TransportVersion V_8_500_074 = def(8_500_074); - public static final TransportVersion NODE_INFO_INDEX_VERSION_ADDED = def(8_500_075); - public static final TransportVersion FIRST_NEW_ID_LAYOUT = def(8_501_00_0); - public static final TransportVersion COMMIT_PRIMARY_TERM_GENERATION = def(8_501_00_1); - public static final TransportVersion WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED = def(8_502_00_0); - public static final TransportVersion RECOVERY_COMMIT_TOO_NEW_EXCEPTION_ADDED = def(8_503_00_0); - public static final 
TransportVersion NODE_INFO_COMPONENT_VERSIONS_ADDED = def(8_504_00_0); - public static final TransportVersion COMPACT_FIELD_CAPS_ADDED = def(8_505_00_0); - public static final TransportVersion DATA_STREAM_RESPONSE_INDEX_PROPERTIES = def(8_506_00_0); - public static final TransportVersion ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED = def(8_507_00_0); - public static final TransportVersion LONG_COUNT_IN_HISTOGRAM_ADDED = def(8_508_00_0); - public static final TransportVersion INFERENCE_MODEL_SECRETS_ADDED = def(8_509_00_0); - public static final TransportVersion NODE_INFO_REQUEST_SIMPLIFIED = def(8_510_00_0); - public static final TransportVersion NESTED_KNN_VECTOR_QUERY_V = def(8_511_00_0); - public static final TransportVersion ML_PACKAGE_LOADER_PLATFORM_ADDED = def(8_512_00_0); - public static final TransportVersion ELSER_SERVICE_MODEL_VERSION_ADDED_PATCH = def(8_512_00_1); + public static final TransportVersion V_8_9_X = def(8_500_020); + public static final TransportVersion V_8_10_X = def(8_500_061); + public static final TransportVersion V_8_11_X = def(8_512_00_1); public static final TransportVersion PLUGIN_DESCRIPTOR_OPTIONAL_CLASSNAME = def(8_513_00_0); public static final TransportVersion UNIVERSAL_PROFILING_LICENSE_ADDED = def(8_514_00_0); public static final TransportVersion ELSER_SERVICE_MODEL_VERSION_ADDED = def(8_515_00_0); @@ -170,6 +144,7 @@ static TransportVersion def(int id) { public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); public static final TransportVersion INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED = def(8_559_00_0); public static final TransportVersion ENRICH_ELASTICSEARCH_VERSION_REMOVED = def(8_560_00_0); + public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH = def(8_560_00_1); public static final TransportVersion NODE_STATS_REQUEST_SIMPLIFIED = def(8_561_00_0); public static final TransportVersion TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED = def(8_562_00_0); public static 
final TransportVersion ESQL_ASYNC_QUERY = def(8_563_00_0); @@ -182,6 +157,14 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_PLAN_POINT_LITERAL_WKB = def(8_570_00_0); public static final TransportVersion HOT_THREADS_AS_BYTES = def(8_571_00_0); public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED = def(8_572_00_0); + public static final TransportVersion ESQL_ENRICH_POLICY_CCQ_MODE = def(8_573_00_0); + public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ = def(8_574_00_0); + public static final TransportVersion PEERFINDER_REPORTS_PEERS_MASTERS = def(8_575_00_0); + public static final TransportVersion ESQL_MULTI_CLUSTERS_ENRICH = def(8_576_00_0); + public static final TransportVersion NESTED_KNN_MORE_INNER_HITS = def(8_577_00_0); + public static final TransportVersion REQUIRE_DATA_STREAM_ADDED = def(8_578_00_0); + public static final TransportVersion ML_INFERENCE_COHERE_EMBEDDINGS_ADDED = def(8_579_00_0); + public static final TransportVersion DESIRED_NODE_VERSION_OPTIONAL_STRING = def(8_580_00_0); /* * STOP! READ THIS FIRST! No, really, @@ -246,7 +229,7 @@ static TransportVersion def(int id) { * Reference to the minimum transport version that can be used with CCS. * This should be the transport version used by the previous minor release. 
*/ - public static final TransportVersion MINIMUM_CCS_VERSION = ML_PACKAGE_LOADER_PLATFORM_ADDED; + public static final TransportVersion MINIMUM_CCS_VERSION = V_8_11_X; static final NavigableMap VERSION_IDS = getAllVersionIds(TransportVersions.class); @@ -303,6 +286,8 @@ static Collection getAllVersions() { return VERSION_IDS.values(); } + static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(TransportVersions.class); + // no instance private TransportVersions() {} } diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 4181b077cb185..3a119d31d5dc3 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -117,6 +117,8 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_7_17_15 = new Version(7_17_15_99); public static final Version V_7_17_16 = new Version(7_17_16_99); public static final Version V_7_17_17 = new Version(7_17_17_99); + public static final Version V_7_17_18 = new Version(7_17_18_99); + public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); public static final Version V_8_1_0 = new Version(8_01_00_99); @@ -161,6 +163,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_11_3 = new Version(8_11_03_99); public static final Version V_8_11_4 = new Version(8_11_04_99); public static final Version V_8_12_0 = new Version(8_12_00_99); + public static final Version V_8_12_1 = new Version(8_12_01_99); public static final Version V_8_13_0 = new Version(8_13_00_99); public static final Version CURRENT = V_8_13_0; diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index aebe4922e416a..d07717857169b 100644 --- 
a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -89,19 +89,21 @@ default ActionListener delegateResponse(BiConsumer Type of the delegating listener's response - * @return Delegating listener + * @return a new listener that delegates failures to this listener and runs {@code bc} on a response. */ default ActionListener delegateFailure(BiConsumer, T> bc) { return new ActionListenerImplementations.DelegatingFailureActionListener<>(this, bc); } /** - * Same as {@link #delegateFailure(BiConsumer)} except that any failure thrown by {@code bc} or the delegate listener's - * {@link #onResponse} will be passed to the delegate listeners {@link #onFailure(Exception)}. + * Same as {@link #delegateFailure(BiConsumer)} except that any failure thrown by {@code bc} or the original listener's + * {@link #onResponse} will be passed to the original listener's {@link #onFailure(Exception)}. */ default ActionListener delegateFailureAndWrap(CheckedBiConsumer, T, ? extends Exception> bc) { return new ActionListenerImplementations.ResponseWrappingActionListener<>(this, bc); @@ -150,7 +152,7 @@ public String toString() { * the sense that an exception from the {@code onResponse} consumer is passed into the {@code onFailure} consumer. *

    * If the {@code onFailure} argument is {@code listener::onFailure} for some other {@link ActionListener}, prefer to use - * {@link #delegateFailureAndWrap} instead. + * {@link #delegateFailureAndWrap} instead for performance reasons. * @param onResponse the checked consumer of the response, executed when the listener is completed successfully. If it throws an * exception, the exception is passed to the {@code onFailure} consumer. * @param onFailure the consumer of the failure, executed when the listener is completed with an exception (or it is completed @@ -349,7 +351,7 @@ public boolean equals(Object obj) { /** * Execute the given action in a {@code try/catch} block which feeds all exceptions to the given listener's {@link #onFailure} method. */ - static > void run(L listener, CheckedConsumer action) { + static > void run(L listener, CheckedConsumer action) { try { action.accept(listener); } catch (Exception e) { @@ -357,4 +359,24 @@ static > void run(L listener, CheckedConsumer void runWithResource( + ActionListener listener, + CheckedSupplier resourceSupplier, + CheckedBiConsumer, R, ? extends Exception> action + ) { + R resource; + try { + resource = resourceSupplier.get(); + } catch (Exception e) { + safeOnFailure(listener, e); + return; + } + + ActionListener.run(ActionListener.runBefore(listener, resource::close), l -> action.accept(l, resource)); + } + } diff --git a/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java b/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java index 972e2d0f4adae..bf4f2dcc2d8db 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java @@ -197,6 +197,11 @@ public String toString() { } } + /** + * Replaces the onResponse handling of a given ActionListener with a lambda that receives both the original listener and a response. 
+ * This is useful when a listener is needed to do some additional work with a response before passing a response on to the original + * listener. + */ static final class DelegatingFailureActionListener extends DelegatingActionListener { private final BiConsumer, T> bc; @@ -221,6 +226,10 @@ public String toString() { } } + /** + * The same as {@link DelegatingFailureActionListener} with the addition of exception handling in {@link #onResponse(Object)} to forward + * any exceptions to {@link #onFailure(Exception)}. + */ static final class ResponseWrappingActionListener extends DelegatingActionListener { private final CheckedBiConsumer, T, ? extends Exception> bc; diff --git a/server/src/main/java/org/elasticsearch/action/ActionListenerResponseHandler.java b/server/src/main/java/org/elasticsearch/action/ActionListenerResponseHandler.java index 4800ba191edf7..f511d5a333062 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListenerResponseHandler.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListenerResponseHandler.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; @@ -55,7 +54,7 @@ public void handleException(TransportException e) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return executor; } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index dd70dc65b853b..4d3079ae88465 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -232,6 +232,7 @@ 
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.multibindings.MapBinder; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -264,6 +265,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestHeaderDefinition; +import org.elasticsearch.rest.RestInterceptor; import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.action.RestFieldCapabilitiesAction; import org.elasticsearch.rest.action.admin.cluster.RestAddVotingConfigExclusionAction; @@ -425,7 +427,6 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.function.UnaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -446,6 +447,7 @@ public class ActionModule extends AbstractModule { private final Settings settings; private final IndexNameExpressionResolver indexNameExpressionResolver; + private final NamedWriteableRegistry namedWriteableRegistry; private final IndexScopedSettings indexScopedSettings; private final ClusterSettings clusterSettings; private final SettingsFilter settingsFilter; @@ -466,6 +468,7 @@ public class ActionModule extends AbstractModule { public ActionModule( Settings settings, IndexNameExpressionResolver indexNameExpressionResolver, + NamedWriteableRegistry namedWriteableRegistry, IndexScopedSettings indexScopedSettings, ClusterSettings clusterSettings, SettingsFilter settingsFilter, @@ -483,6 +486,7 @@ public ActionModule( ) { this.settings = settings; this.indexNameExpressionResolver = indexNameExpressionResolver; + this.namedWriteableRegistry = namedWriteableRegistry; this.indexScopedSettings = indexScopedSettings; 
this.clusterSettings = clusterSettings; this.settingsFilter = settingsFilter; @@ -501,7 +505,7 @@ public ActionModule( new RestHeaderDefinition(Task.X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER, false) ) ).collect(Collectors.toSet()); - UnaryOperator restInterceptor = getRestServerComponent( + final RestInterceptor restInterceptor = getRestServerComponent( "REST interceptor", actionPlugins, restPlugin -> restPlugin.getRestHandlerInterceptor(threadPool.getThreadContext()) @@ -929,12 +933,12 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestBulkAction(settings)); registerHandler.accept(new RestUpdateAction()); - registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder())); + registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry)); registerHandler.accept(new RestSearchScrollAction()); registerHandler.accept(new RestClearScrollAction()); registerHandler.accept(new RestOpenPointInTimeAction()); registerHandler.accept(new RestClosePointInTimeAction()); - registerHandler.accept(new RestMultiSearchAction(settings, restController.getSearchUsageHolder())); + registerHandler.accept(new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry)); registerHandler.accept(new RestKnnSearchAction()); registerHandler.accept(new RestValidateQueryAction()); @@ -1001,12 +1005,13 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< // Desired nodes registerHandler.accept(new RestGetDesiredNodesAction()); - registerHandler.accept(new RestUpdateDesiredNodesAction()); + registerHandler.accept(new RestUpdateDesiredNodesAction(clusterSupportsFeature)); registerHandler.accept(new RestDeleteDesiredNodesAction()); for (ActionPlugin plugin : actionPlugins) { for (RestHandler handler : plugin.getRestHandlers( settings, + namedWriteableRegistry, restController, clusterSettings, indexScopedSettings, diff --git 
a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index 32d65d743e6a6..32c8bcf155522 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -13,7 +13,9 @@ import java.util.Objects; -public abstract class ActionRequestBuilder { +public abstract class ActionRequestBuilder + implements + RequestBuilder { protected final ActionType action; protected final Request request; diff --git a/server/src/main/java/org/elasticsearch/action/ActionType.java b/server/src/main/java/org/elasticsearch/action/ActionType.java index 9ba2389d86be9..51927baf7b4db 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionType.java +++ b/server/src/main/java/org/elasticsearch/action/ActionType.java @@ -11,8 +11,6 @@ import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.transport.TransportService; @@ -25,13 +23,11 @@ * TYPE}. Some legacy implementations create custom subclasses of {@link ActionType} but this is unnecessary and somewhat wasteful. Prefer * to create instances of this class directly whenever possible. */ +@SuppressWarnings("unused") // Response type arg is used to enable better type inference when calling Client#execute public class ActionType { - private final String name; - private final Writeable.Reader responseReader; - /** - * Construct an {@link ActionType} which callers can execute on the local node (using {@link NodeClient}). + * Construct an {@link ActionType} with the given name. *

    * There is no facility for directly executing an action on a different node in the local cluster. To achieve this, implement an action * which runs on the local node and knows how to use the {@link TransportService} to forward the request to a different node. There are @@ -39,32 +35,26 @@ public class ActionType { * * @param name The name of the action, which must be unique across actions. * @return an {@link ActionType} which callers can execute on the local node. + * @deprecated Just create the {@link ActionType} directly. */ + @Deprecated(forRemoval = true) public static ActionType localOnly(String name) { - return new ActionType<>(name, Writeable.Reader.localOnly()); + return new ActionType<>(name); } - public static ActionType emptyResponse(String name) { - return new ActionType<>(name, in -> ActionResponse.Empty.INSTANCE); - } + private final String name; /** - * Construct an {@link ActionType} which callers can execute both on the local node (using {@link NodeClient}) and on a remote cluster - * (using a client obtained from {@link Client#getRemoteClusterClient}). If the action is only to be executed on the local cluster then - * declare it using {@link #localOnly} instead. + * Construct an {@link ActionType} with the given name. *

    * There is no facility for directly executing an action on a different node in the local cluster. To achieve this, implement an action * which runs on the local node and knows how to use the {@link TransportService} to forward the request to a different node. There are * several utilities that help implement such an action, including {@link TransportNodesAction} or {@link TransportMasterNodeAction}. * - * @param name The name of the action, which must be unique across actions. When executed on a remote cluster, this is the - * ID of the transport action which is sent to the handling node in the remote cluster. - * @param responseReader Defines how to deserialize responses received from executions of this action on remote clusters. Executions of - * this action on the local node receive the response object directly, without needing any deserialization. + * @param name The name of the action, which must be unique across actions. */ - public ActionType(String name, Writeable.Reader responseReader) { + public ActionType(String name) { this.name = name; - this.responseReader = responseReader; } /** @@ -74,13 +64,6 @@ public String name() { return this.name; } - /** - * Get a reader that can read a response from a {@link org.elasticsearch.common.io.stream.StreamInput}. 
- */ - public Writeable.Reader getResponseReader() { - return responseReader; - } - @Override public boolean equals(Object o) { return o instanceof ActionType actionType && name.equals(actionType.name); diff --git a/server/src/main/java/org/elasticsearch/action/DelegatingActionListener.java b/server/src/main/java/org/elasticsearch/action/DelegatingActionListener.java index 4985f5ab2fce1..6cc3c4cb92fae 100644 --- a/server/src/main/java/org/elasticsearch/action/DelegatingActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/DelegatingActionListener.java @@ -11,8 +11,12 @@ import static org.elasticsearch.action.ActionListenerImplementations.safeOnFailure; /** - * A wrapper around an {@link ActionListener} which delegates failures safely to the inner listener's {@link ActionListener#onFailure} - * method and which has a {@link #toString()} implementation which describes this class and the delegate. + * A wrapper around an {@link ActionListener} {@code L} that by default delegates failures to {@code L}'s {@link ActionListener#onFailure} + * method. The wrapper also provides a {@link #toString()} implementation that describes this class and the delegate. + *

    + * This is a useful base class for creating ActionListener wrappers that override the {@link #onResponse} handling, with access to + * {@code L}, while retaining all of {@code L}'s other handling. It can also be useful to override other methods to do new work with access + * to {@code L}. */ public abstract class DelegatingActionListener implements ActionListener { diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java index dab46aed5b4bc..7f3578ce9f16f 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java @@ -24,9 +24,11 @@ import org.elasticsearch.index.shard.ShardId; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Locale; import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.action.index.IndexRequest.MAX_DOCUMENT_ID_LENGTH_IN_BYTES; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -39,6 +41,9 @@ public interface DocWriteRequest extends IndicesRequest, Accountable { // Flag set for disallowing index auto creation for an individual write request. String REQUIRE_ALIAS = "require_alias"; + // Flag set for disallowing index auto creation if no matching data-stream index template is available. + String REQUIRE_DATA_STREAM = "require_data_stream"; + // Flag indicating that the list of executed pipelines should be returned in the request String LIST_EXECUTED_PIPELINES = "list_executed_pipelines"; @@ -147,6 +152,12 @@ public interface DocWriteRequest extends IndicesRequest, Accountable { */ boolean isRequireAlias(); + /** + * Should this request override specifically require the destination to be a data stream? 
+ * @return boolean flag, when true specifically requires a data stream + */ + boolean isRequireDataStream(); + /** * Finalize the request before executing or routing it. */ @@ -314,4 +325,19 @@ static ActionRequestValidationException validateSeqNoBasedCASParams( return validationException; } + + static ActionRequestValidationException validateDocIdLength(String id, ActionRequestValidationException validationException) { + if (id != null && id.getBytes(StandardCharsets.UTF_8).length > MAX_DOCUMENT_ID_LENGTH_IN_BYTES) { + validationException = addValidationError( + "id [" + + id + + "] is too long, must be no longer than " + + MAX_DOCUMENT_ID_LENGTH_IN_BYTES + + " bytes but was: " + + id.getBytes(StandardCharsets.UTF_8).length, + validationException + ); + } + return validationException; + } } diff --git a/server/src/main/java/org/elasticsearch/action/RemoteClusterActionType.java b/server/src/main/java/org/elasticsearch/action/RemoteClusterActionType.java new file mode 100644 index 0000000000000..6e993fdbad8e1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/RemoteClusterActionType.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.transport.TransportResponse; + +/** + * An action which can be invoked by {@link RemoteClusterClient#execute}. The implementation must be registered with the transport service. + *

    + * Typically, every {@link RemoteClusterActionType} instance is a global constant (i.e. a public static final field) called {@code + * REMOTE_TYPE}. + */ +public final class RemoteClusterActionType { + + private final String name; + private final Writeable.Reader responseReader; + + public static RemoteClusterActionType emptyResponse(String name) { + return new RemoteClusterActionType<>(name, in -> ActionResponse.Empty.INSTANCE); + } + + /** + * Construct an {@link ActionType} which callers can execute on a remote cluster using a {@link RemoteClusterClient}, typically obtained + * from {@link Client#getRemoteClusterClient}). + * + * @param name The name of the action, which must be unique across actions. This is the ID of the transport action which is + * sent to the handling node in the remote cluster. + * @param responseReader Defines how to deserialize responses received from executions of this action. + */ + public RemoteClusterActionType(String name, Writeable.Reader responseReader) { + this.name = name; + this.responseReader = responseReader; + } + + /** + * The name of the action. Must be unique across actions. + */ + public String name() { + return this.name; + } + + /** + * Get a reader that can read a response from a {@link org.elasticsearch.common.io.stream.StreamInput}. 
+ */ + public Writeable.Reader getResponseReader() { + return responseReader; + } + + @Override + public boolean equals(Object o) { + return o instanceof RemoteClusterActionType actionType && name.equals(actionType.name); + } + + @Override + public int hashCode() { + return name.hashCode(); + } + + @Override + public String toString() { + return name; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/RequestBuilder.java b/server/src/main/java/org/elasticsearch/action/RequestBuilder.java new file mode 100644 index 0000000000000..5db55d6569876 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/RequestBuilder.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.TimeValue; + +public interface RequestBuilder { + /** + * This method returns the request that this builder builds. Depending on the implementation, it might return a new request with each + * call or the same request with each call. 
+ */ + Request request(); + + ActionFuture execute(); + + Response get(); + + Response get(TimeValue timeout); + + void execute(ActionListener listener); +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java index 7599eb2faef96..c51e6140fac89 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java @@ -44,10 +44,7 @@ public class TransportClusterAllocationExplainAction extends TransportMasterNode ClusterAllocationExplainRequest, ClusterAllocationExplainResponse> { - public static final ActionType TYPE = new ActionType<>( - "cluster:monitor/allocation/explain", - ClusterAllocationExplainResponse::new - ); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/allocation/explain"); private static final Logger logger = LogManager.getLogger(TransportClusterAllocationExplainAction.class); private final ClusterInfoService clusterInfoService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java index 76b563c3f540a..76a9b5a245a84 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java @@ -35,7 +35,7 @@ public class TransportDeleteDesiredBalanceAction extends TransportMasterNodeAction { - public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/desired_balance/reset"); + public static 
final ActionType TYPE = new ActionType<>("cluster:admin/desired_balance/reset"); @Nullable private final MasterServiceTaskQueue resetDesiredBalanceTaskQueue; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java index 49611ffae8718..fca7b5c44fd29 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java @@ -43,10 +43,7 @@ public class TransportGetDesiredBalanceAction extends TransportMasterNodeReadAction { - public static final ActionType TYPE = new ActionType<>( - "cluster:admin/desired_balance/get", - DesiredBalanceResponse::from - ); + public static final ActionType TYPE = new ActionType<>("cluster:admin/desired_balance/get"); @Nullable private final DesiredBalanceShardsAllocator desiredBalanceShardsAllocator; private final ClusterInfoService clusterInfoService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java index b9bcf0944cd83..c540d535e60d4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java @@ -49,7 +49,7 @@ public class TransportAddVotingConfigExclusionsAction extends TransportMasterNod AddVotingConfigExclusionsRequest, ActionResponse.Empty> { - public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/voting_config/add_exclusions"); + public static final ActionType TYPE = new 
ActionType<>("cluster:admin/voting_config/add_exclusions"); private static final Logger logger = LogManager.getLogger(TransportAddVotingConfigExclusionsAction.class); public static final Setting MAXIMUM_VOTING_CONFIG_EXCLUSIONS_SETTING = Setting.intSetting( diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java index 113d085f51fdb..bbe292e817389 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java @@ -43,7 +43,7 @@ public class TransportClearVotingConfigExclusionsAction extends TransportMasterN ClearVotingConfigExclusionsRequest, ActionResponse.Empty> { - public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/voting_config/clear_exclusions"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/voting_config/clear_exclusions"); private static final Logger logger = LogManager.getLogger(TransportClearVotingConfigExclusionsAction.class); private final Reconfigurator reconfigurator; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoAction.java index 906aa00947bb2..95025c851fd94 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoAction.java @@ -37,7 +37,7 @@ public class ClusterFormationInfoAction extends ActionType { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java index 689e0579d1cbd..e6e2616e67662 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java @@ -35,11 +35,9 @@ import java.io.IOException; -public class TransportDeleteDesiredNodesAction extends TransportMasterNodeAction< - TransportDeleteDesiredNodesAction.Request, - ActionResponse.Empty> { +public class TransportDeleteDesiredNodesAction extends TransportMasterNodeAction { - public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/desired_nodes/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/desired_nodes/delete"); private final MasterServiceTaskQueue taskQueue; @Inject @@ -56,7 +54,7 @@ public TransportDeleteDesiredNodesAction( clusterService, threadPool, actionFilters, - Request::new, + AcknowledgedRequest.Plain::new, indexNameExpressionResolver, in -> ActionResponse.Empty.INSTANCE, EsExecutors.DIRECT_EXECUTOR_SERVICE @@ -65,13 +63,17 @@ public TransportDeleteDesiredNodesAction( } @Override - protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) - throws Exception { + protected void masterOperation( + Task task, + AcknowledgedRequest.Plain request, + ClusterState state, + ActionListener listener + ) throws Exception { taskQueue.submitTask("delete-desired-nodes", new DeleteDesiredNodesTask(listener), request.masterNodeTimeout()); } @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { + protected ClusterBlockException checkBlock(AcknowledgedRequest.Plain request, ClusterState state) { return 
state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java index 9f4c42a810563..fed28366568e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.desirednodes.DesiredNodesSettingsValidator; import org.elasticsearch.cluster.desirednodes.VersionConflictException; import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodes; @@ -36,9 +35,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.List; import java.util.Locale; -import java.util.function.Consumer; import static java.lang.String.format; @@ -47,7 +44,6 @@ public class TransportUpdateDesiredNodesAction extends TransportMasterNodeAction private final RerouteService rerouteService; private final FeatureService featureService; - private final Consumer> desiredNodesValidator; private final MasterServiceTaskQueue taskQueue; @Inject @@ -60,30 +56,6 @@ public TransportUpdateDesiredNodesAction( ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, AllocationService allocationService - ) { - this( - transportService, - clusterService, - rerouteService, - featureService, - threadPool, - actionFilters, - indexNameExpressionResolver, - new DesiredNodesSettingsValidator(), - allocationService - ); - } - - 
TransportUpdateDesiredNodesAction( - TransportService transportService, - ClusterService clusterService, - RerouteService rerouteService, - FeatureService featureService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - Consumer> desiredNodesValidator, - AllocationService allocationService ) { super( UpdateDesiredNodesAction.NAME, @@ -99,7 +71,6 @@ public TransportUpdateDesiredNodesAction( ); this.rerouteService = rerouteService; this.featureService = featureService; - this.desiredNodesValidator = desiredNodesValidator; this.taskQueue = clusterService.createTaskQueue( "update-desired-nodes", Priority.URGENT, @@ -119,10 +90,14 @@ protected void masterOperation( ClusterState state, ActionListener responseListener ) throws Exception { - ActionListener.run(responseListener, listener -> { - desiredNodesValidator.accept(request.getNodes()); - taskQueue.submitTask("update-desired-nodes", new UpdateDesiredNodesTask(request, listener), request.masterNodeTimeout()); - }); + ActionListener.run( + responseListener, + listener -> taskQueue.submitTask( + "update-desired-nodes", + new UpdateDesiredNodesTask(request, listener), + request.masterNodeTimeout() + ) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesAction.java index 335ab15cd14cb..37f7baa3a30d9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesAction.java @@ -15,6 +15,6 @@ public class UpdateDesiredNodesAction extends ActionType PARSER = new ConstructingObjectParser<>( - "cluster_health_response", - true, - parsedObjects -> { - int i = 0; - // ClusterStateHealth fields - int numberOfNodes = (int) parsedObjects[i++]; - 
int numberOfDataNodes = (int) parsedObjects[i++]; - int activeShards = (int) parsedObjects[i++]; - int relocatingShards = (int) parsedObjects[i++]; - int activePrimaryShards = (int) parsedObjects[i++]; - int initializingShards = (int) parsedObjects[i++]; - int unassignedShards = (int) parsedObjects[i++]; - double activeShardsPercent = (double) parsedObjects[i++]; - String statusStr = (String) parsedObjects[i++]; - ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); - @SuppressWarnings("unchecked") - List indexList = (List) parsedObjects[i++]; - final Map indices; - if (indexList == null || indexList.isEmpty()) { - indices = emptyMap(); - } else { - indices = Maps.newMapWithExpectedSize(indexList.size()); - for (ClusterIndexHealth indexHealth : indexList) { - indices.put(indexHealth.getIndex(), indexHealth); - } - } - ClusterStateHealth stateHealth = new ClusterStateHealth( - activePrimaryShards, - activeShards, - relocatingShards, - initializingShards, - unassignedShards, - numberOfNodes, - numberOfDataNodes, - activeShardsPercent, - status, - indices - ); - - // ClusterHealthResponse fields - String clusterName = (String) parsedObjects[i++]; - int numberOfPendingTasks = (int) parsedObjects[i++]; - int numberOfInFlightFetch = (int) parsedObjects[i++]; - int delayedUnassignedShards = (int) parsedObjects[i++]; - long taskMaxWaitingTimeMillis = (long) parsedObjects[i++]; - boolean timedOut = (boolean) parsedObjects[i]; - return new ClusterHealthResponse( - clusterName, - numberOfPendingTasks, - numberOfInFlightFetch, - delayedUnassignedShards, - TimeValue.timeValueMillis(taskMaxWaitingTimeMillis), - timedOut, - stateHealth - ); - } - ); - - private static final ObjectParser.NamedObjectParser INDEX_PARSER = ( - XContentParser parser, - Void context, - String index) -> ClusterIndexHealth.innerFromXContent(parser, index); - - static { - // ClusterStateHealth fields - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_NODES)); - 
PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_DATA_NODES)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(RELOCATING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_PRIMARY_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(INITIALIZING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(UNASSIGNED_SHARDS)); - PARSER.declareDouble(constructorArg(), new ParseField(ACTIVE_SHARDS_PERCENT_AS_NUMBER)); - PARSER.declareString(constructorArg(), new ParseField(STATUS)); - // Can be absent if LEVEL == 'cluster' - PARSER.declareNamedObjects(optionalConstructorArg(), INDEX_PARSER, new ParseField(INDICES)); - - // ClusterHealthResponse fields - PARSER.declareString(constructorArg(), new ParseField(CLUSTER_NAME)); - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_PENDING_TASKS)); - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_IN_FLIGHT_FETCH)); - PARSER.declareInt(constructorArg(), new ParseField(DELAYED_UNASSIGNED_SHARDS)); - PARSER.declareLong(constructorArg(), new ParseField(TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS)); - PARSER.declareBoolean(constructorArg(), new ParseField(TIMED_OUT)); - } + static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards"; + static final String ACTIVE_SHARDS = "active_shards"; + static final String RELOCATING_SHARDS = "relocating_shards"; + static final String INITIALIZING_SHARDS = "initializing_shards"; + static final String UNASSIGNED_SHARDS = "unassigned_shards"; + static final String INDICES = "indices"; private String clusterName; private int numberOfPendingTasks = 0; @@ -370,10 +273,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static ClusterHealthResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) return true; 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 5af2d546ac624..8cca2c5bf6472 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -49,7 +49,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction { public static final String NAME = "cluster:monitor/health"; - public static final ActionType TYPE = new ActionType(NAME, ClusterHealthResponse::new); + public static final ActionType TYPE = new ActionType(NAME); private static final Logger logger = LogManager.getLogger(TransportClusterHealthAction.class); private final AllocationService allocationService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusAction.java index 85c6c9ef9e133..4a57620d61e81 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusAction.java @@ -19,6 +19,6 @@ public class GetFeatureUpgradeStatusAction extends ActionType { - public static final ActionType TYPE = ActionType.localOnly("cluster:monitor/nodes/hot_threads"); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/hot_threads"); @Inject public TransportNodesHotThreadsAction( diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 6e700ca4aecc3..fde58a8328091 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -73,13 +73,13 @@ public NodeInfo(StreamInput in) throws IOException { } else { transportVersion = TransportVersion.fromId(legacyVersion.id); } - if (in.getTransportVersion().onOrAfter(TransportVersions.NODE_INFO_INDEX_VERSION_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { indexVersion = IndexVersion.readVersion(in); } else { indexVersion = IndexVersion.fromId(legacyVersion.id); } } - if (in.getTransportVersion().onOrAfter(TransportVersions.NODE_INFO_COMPONENT_VERSIONS_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { componentVersions = in.readImmutableMap(StreamInput::readString, StreamInput::readVInt); } else { componentVersions = Map.of(); @@ -242,10 +242,8 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { TransportVersion.writeVersion(transportVersion, out); } - if (out.getTransportVersion().onOrAfter(TransportVersions.NODE_INFO_INDEX_VERSION_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { IndexVersion.writeVersion(indexVersion, out); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.NODE_INFO_COMPONENT_VERSIONS_ADDED)) { out.writeMap(componentVersions, StreamOutput::writeString, StreamOutput::writeVInt); } Build.writeBuild(build, out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index 89753bfa94df8..826d74935f556 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -27,7 +27,7 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.TransportVersions.NODE_INFO_REQUEST_SIMPLIFIED; +import static org.elasticsearch.TransportVersions.V_8_11_X; public class TransportNodesInfoAction extends TransportNodesAction< NodesInfoRequest, @@ -35,7 +35,7 @@ public class TransportNodesInfoAction extends TransportNodesAction< TransportNodesInfoAction.NodeInfoRequest, NodeInfo> { - public static final ActionType TYPE = ActionType.localOnly("cluster:monitor/nodes/info"); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/info"); private final NodeService nodeService; @Inject @@ -101,7 +101,7 @@ public static class NodeInfoRequest extends TransportRequest { public NodeInfoRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(NODE_INFO_REQUEST_SIMPLIFIED)) { + if (in.getTransportVersion().onOrAfter(V_8_11_X)) { this.nodesInfoMetrics = new NodesInfoMetrics(in); } else { this.nodesInfoMetrics = new NodesInfoRequest(in).getNodesInfoMetrics(); @@ -115,7 +115,7 @@ public NodeInfoRequest(NodesInfoRequest request) { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(NODE_INFO_REQUEST_SIMPLIFIED)) { + if (out.getTransportVersion().onOrAfter(V_8_11_X)) { this.nodesInfoMetrics.writeTo(out); } else { new NodesInfoRequest().clear().addMetrics(nodesInfoMetrics.requestedMetrics()).writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java index 9598f378a188c..71da6fdeb1f3b 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -40,9 +40,7 @@ public class TransportNodesReloadSecureSettingsAction extends TransportNodesActi NodesReloadSecureSettingsRequest.NodeRequest, NodesReloadSecureSettingsResponse.NodeResponse> { - public static final ActionType TYPE = ActionType.localOnly( - "cluster:admin/nodes/reload_secure_settings" - ); + public static final ActionType TYPE = new ActionType<>("cluster:admin/nodes/reload_secure_settings"); private static final Logger logger = LogManager.getLogger(TransportNodesReloadSecureSettingsAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalAction.java index e0c4d07e2c385..4e996055a1962 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalAction.java @@ -16,7 +16,7 @@ public class PrevalidateNodeRemovalAction extends ActionType { public static final String ACTION_NAME = "internal:admin/indices/prevalidate_shard_path"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, Writeable.Reader.localOnly()); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private static final Logger logger = LogManager.getLogger(TransportPrevalidateShardPathAction.class); private final TransportService transportService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index b8d1a431f92e8..cdb9191bd8d70 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -117,7 +117,7 @@ public NodeStats(StreamInput in) throws IOException { ingestStats = in.readOptionalWriteable(IngestStats::read); adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new); indexingPressureStats = in.readOptionalWriteable(IndexingPressureStats::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { repositoriesStats = in.readOptionalWriteable(RepositoriesStats::new); } else { repositoriesStats = null; @@ -294,7 +294,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(ingestStats); out.writeOptionalWriteable(adaptiveSelectionStats); out.writeOptionalWriteable(indexingPressureStats); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(repositoriesStats); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 106bad68e482e..1edc57b0a7df2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -41,7 +41,7 @@ public class TransportNodesStatsAction extends TransportNodesAction< TransportNodesStatsAction.NodeStatsRequest, NodeStats> { - public static final ActionType TYPE = ActionType.localOnly("cluster:monitor/nodes/stats"); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/stats"); private final NodeService nodeService; @Inject diff 
--git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java index 86d0206d62b65..50fea2093da49 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java @@ -9,16 +9,17 @@ package org.elasticsearch.action.admin.cluster.node.tasks.cancel; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; /** * ActionType for cancelling running tasks */ -public class CancelTasksAction extends ActionType { +public class CancelTasksAction extends ActionType { public static final CancelTasksAction INSTANCE = new CancelTasksAction(); public static final String NAME = "cluster:admin/tasks/cancel"; private CancelTasksAction() { - super(NAME, CancelTasksResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java index 45fc4e352a4ba..5fdd50e0c9e66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java @@ -8,13 +8,14 @@ package org.elasticsearch.action.admin.cluster.node.tasks.cancel; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.tasks.TasksRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; /** * Builder for the request to cancel tasks running on the specified nodes */ -public class CancelTasksRequestBuilder extends 
TasksRequestBuilder { +public class CancelTasksRequestBuilder extends TasksRequestBuilder { public CancelTasksRequestBuilder(ElasticsearchClient client) { super(client, CancelTasksAction.INSTANCE, new CancelTasksRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java deleted file mode 100644 index a53ed8dacc36c..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.node.tasks.cancel; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; - -/** - * Returns the list of tasks that were cancelled - */ -public class CancelTasksResponse extends ListTasksResponse { - - private static final ConstructingObjectParser PARSER = setupParser( - "cancel_tasks_response", - CancelTasksResponse::new - ); - - public CancelTasksResponse(StreamInput in) throws IOException { - super(in); - } - - public CancelTasksResponse( - List tasks, - List taskFailures, - List nodeFailures - ) { - super(tasks, taskFailures, 
nodeFailures); - } - - public static CancelTasksResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index aa7c19cf35514..1f3271be79797 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; @@ -31,7 +32,7 @@ * For a task to be cancellable it has to return an instance of * {@link CancellableTask} from {@link TransportRequest#createTask} */ -public class TransportCancelTasksAction extends TransportTasksAction { +public class TransportCancelTasksAction extends TransportTasksAction { @Inject public TransportCancelTasksAction(ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) { @@ -41,7 +42,7 @@ public TransportCancelTasksAction(ClusterService clusterService, TransportServic transportService, actionFilters, CancelTasksRequest::new, - CancelTasksResponse::new, + ListTasksResponse::new, TaskInfo::from, // Cancellation is usually lightweight, and runs on the transport thread if the task didn't even start yet, but some // implementations of CancellableTask#onCancelled() are nontrivial so we use GENERIC here. TODO could it be SAME? 
@@ -50,13 +51,13 @@ public TransportCancelTasksAction(ClusterService clusterService, TransportServic } @Override - protected CancelTasksResponse newResponse( + protected ListTasksResponse newResponse( CancelTasksRequest request, List tasks, List taskOperationFailures, List failedNodeExceptions ) { - return new CancelTasksResponse(tasks, taskOperationFailures, failedNodeExceptions); + return new ListTasksResponse(tasks, taskOperationFailures, failedNodeExceptions); } protected List processTasks(CancelTasksRequest request) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java index a2acdbcd98c1d..21be31462ef0d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java @@ -20,6 +20,6 @@ public class GetTaskAction extends ActionType { public static final String NAME = "cluster:monitor/task/get"; private GetTaskAction() { - super(NAME, GetTaskResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java index c0c9ec493de70..62ede5b2f480b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -45,7 +45,7 @@ public class TransportListTasksAction extends TransportTasksAction TYPE = new ActionType<>("cluster:monitor/tasks/lists", ListTasksResponse::new); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/tasks/lists"); public static long waitForCompletionTimeout(TimeValue timeout) { if 
(timeout == null) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java index c661023e79629..638773cce52e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java @@ -34,7 +34,7 @@ public class TransportNodesUsageAction extends TransportNodesAction< TransportNodesUsageAction.NodeUsageRequest, NodeUsage> { - public static final ActionType TYPE = ActionType.localOnly("cluster:monitor/nodes/usage"); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/usage"); private final UsageService restUsageService; private final AggregationUsageService aggregationUsageService; private final long sinceTime; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java index b1394d261e790..5f3fd654eeb84 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoMetrics; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; @@ -38,7 +39,11 @@ public class RemoteClusterNodesAction { public static final String NAME = 
"cluster:internal/remote_cluster/nodes"; - public static final ActionType TYPE = new ActionType<>(NAME, RemoteClusterNodesAction.Response::new); + public static final ActionType TYPE = new ActionType<>(NAME); + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + RemoteClusterNodesAction.Response::new + ); public static class Request extends ActionRequest { public static final Request ALL_NODES = new Request(false); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java index 6e95a9807c4b2..c7d74fc414115 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -24,17 +23,13 @@ public final class TransportRemoteInfoAction extends HandledTransportAction { - public static final ActionType TYPE = new ActionType<>("cluster:monitor/remote/info", RemoteInfoResponse::new); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/remote/info"); private final RemoteClusterService remoteClusterService; @Inject - public TransportRemoteInfoAction( - TransportService transportService, - ActionFilters actionFilters, - SearchTransportService searchTransportService - ) { + public TransportRemoteInfoAction(TransportService transportService, ActionFilters actionFilters) { super(TYPE.name(), transportService, actionFilters, RemoteInfoRequest::new, 
EsExecutors.DIRECT_EXECUTOR_SERVICE); - this.remoteClusterService = searchTransportService.getRemoteClusterService(); + this.remoteClusterService = transportService.getRemoteClusterService(); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java index 94fe5a96f89a2..d71c66fd6f3ca 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java @@ -15,6 +15,6 @@ public final class CleanupRepositoryAction extends ActionType { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/repository/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/repository/delete"); private final RepositoriesService repositoriesService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java index 3411f29819f3c..11e31a539fffd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java @@ -19,7 +19,7 @@ public class GetRepositoriesAction extends ActionType { public static final String NAME = "cluster:admin/repository/get"; private GetRepositoriesAction() { - super(NAME, GetRepositoriesResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java index 98b2d1561eb01..eb7e26b30e874 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java @@ -34,7 +34,7 @@ */ public class TransportPutRepositoryAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/repository/put"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/repository/put"); private final RepositoriesService repositoriesService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java index 0daaae79592f1..fe66441c41844 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java @@ -19,6 +19,6 @@ public class VerifyRepositoryAction extends ActionType public static final String NAME = "cluster:admin/repository/verify"; private VerifyRepositoryAction() { - super(NAME, VerifyRepositoryResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java index 6b626f7c0b170..a09ef9e38bd19 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java @@ -16,6 +16,6 @@ public class ClusterRerouteAction extends 
ActionType { public static final String NAME = "cluster:admin/reroute"; private ClusterRerouteAction() { - super(NAME, ClusterRerouteResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java index 390d2659bf492..52c44c33be3bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.cluster.reroute; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; @@ -111,11 +110,6 @@ public AllocationCommands getCommands() { return commands; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java index 0e868087b637d..1ee458100c47f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java @@ -26,7 +26,7 @@ public class ClusterGetSettingsAction extends ActionType { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/snapshot/clone"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/snapshot/clone"); private final SnapshotsService 
snapshotsService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java index eb3a3dfdaebb9..d24fbbb63246b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java @@ -19,6 +19,6 @@ public class CreateSnapshotAction extends ActionType { public static final String NAME = "cluster:admin/snapshot/create"; private CreateSnapshotAction() { - super(NAME, CreateSnapshotResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java index 1451f39dadf7c..5db56bfe41ad0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java @@ -29,7 +29,7 @@ * Transport action for delete snapshot operation */ public class TransportDeleteSnapshotAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/snapshot/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/snapshot/delete"); private final SnapshotsService snapshotsService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/features/ResetFeatureStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/features/ResetFeatureStateAction.java index 6953fc00b481a..ce1af6abd7732 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/features/ResetFeatureStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/features/ResetFeatureStateAction.java @@ -17,6 +17,6 @@ public class ResetFeatureStateAction extends ActionType { public static final String NAME = "cluster:admin/snapshot/get"; private GetSnapshotsAction() { - super(NAME, GetSnapshotsResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java index bdf822d79f4c5..7cb3440e422a5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java @@ -16,6 +16,6 @@ public class GetShardSnapshotAction extends ActionType public static final String NAME = "internal:admin/snapshot/get_shard"; public GetShardSnapshotAction() { - super(NAME, GetShardSnapshotResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java index a3b6db6ad97c7..3e3916bab19ae 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java @@ -19,6 +19,6 @@ public class RestoreSnapshotAction extends ActionType { public static final String NAME = "cluster:admin/snapshot/restore"; private RestoreSnapshotAction() { - super(NAME, RestoreSnapshotResponse::new); + super(NAME); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java index 9f68ac17a25e8..16faab60b561f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java @@ -19,6 +19,6 @@ public class SnapshotsStatusAction extends ActionType { public static final String NAME = "cluster:admin/snapshot/status"; private SnapshotsStatusAction() { - super(NAME, SnapshotsStatusResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index c7a29e61da28d..9215d97490629 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.snapshots.Snapshot; @@ -51,7 +50,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction< TransportNodesSnapshotsStatus.NodeSnapshotStatus> { public static final String ACTION_NAME = SnapshotsStatusAction.NAME + "[nodes]"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, Writeable.Reader.localOnly()); + public static final ActionType TYPE = new 
ActionType<>(ACTION_NAME); private final SnapshotShardsService snapshotShardsService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java index 6ea5a389bd23c..96cb85d241b52 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java @@ -9,13 +9,18 @@ package org.elasticsearch.action.admin.cluster.state; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; public class ClusterStateAction extends ActionType { public static final ClusterStateAction INSTANCE = new ClusterStateAction(); public static final String NAME = "cluster:monitor/state"; + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + ClusterStateResponse::new + ); private ClusterStateAction() { - super(NAME, ClusterStateResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java index 81a26999d2907..9105c20044223 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java @@ -42,7 +42,7 @@ */ public final class AnalysisStats implements ToXContentFragment, Writeable { - private static final TransportVersion SYNONYM_SETS_VERSION = TransportVersions.V_8_500_061; + private static final TransportVersion SYNONYM_SETS_VERSION = TransportVersions.V_8_10_X; private static final Set SYNONYM_FILTER_TYPES = Set.of("synonym", "synonym_graph"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java index a6bd5bb1f66da..f2e3547d08cda 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public class ClusterStatsAction extends ActionType { @@ -17,6 +16,6 @@ public class ClusterStatsAction extends ActionType { public static final String NAME = "cluster:monitor/stats"; private ClusterStatsAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextAction.java index d984debcce1d0..f1b1e481aea2b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextAction.java @@ -16,6 +16,6 @@ public class GetScriptContextAction extends ActionType public static final String NAME = "cluster:admin/script_context/get"; private GetScriptContextAction() { - super(NAME, GetScriptContextResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageAction.java index 267f15cabfc1e..e8655b3022abd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageAction.java @@ -15,6 +15,6 @@ public 
class GetScriptLanguageAction extends ActionType { public static final String NAME = "cluster:admin/script/get"; private GetStoredScriptAction() { - super(NAME, GetStoredScriptResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 0202a0355abb6..24604a3977096 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -13,47 +13,19 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.StoredScriptSource; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - public class GetStoredScriptResponse extends ActionResponse implements ToXContentObject { public static final ParseField _ID_PARSE_FIELD = new ParseField("_id"); public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); public static final ParseField SCRIPT = new ParseField("script"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "GetStoredScriptResponse", - true, - (a, c) -> { - String id = (String) a[0]; - boolean found = (Boolean) a[1]; - StoredScriptSource scriptSource = (StoredScriptSource) a[2]; - return found ? 
new GetStoredScriptResponse(id, scriptSource) : new GetStoredScriptResponse(id, null); - } - ); - - static { - PARSER.declareField(constructorArg(), (p, c) -> p.text(), _ID_PARSE_FIELD, ObjectParser.ValueType.STRING); - PARSER.declareField(constructorArg(), (p, c) -> p.booleanValue(), FOUND_PARSE_FIELD, ObjectParser.ValueType.BOOLEAN); - PARSER.declareField( - optionalConstructorArg(), - (p, c) -> StoredScriptSource.fromXContent(p, true), - SCRIPT, - ObjectParser.ValueType.OBJECT - ); - } - private String id; private StoredScriptSource source; @@ -103,10 +75,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static GetStoredScriptResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { if (source == null) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java index 37821f597a8e5..a5d1fd7e151c5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java @@ -27,7 +27,7 @@ public class TransportDeleteStoredScriptAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/script/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/script/delete"); @Inject public TransportDeleteStoredScriptAction( diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java index f526cf37e357b..6a73cd0b91264 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java @@ -27,7 +27,7 @@ public class TransportPutStoredScriptAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/script/put"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/script/put"); private final ScriptService scriptService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java index a03f0d36f7dad..efca8ab779b20 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java @@ -31,10 +31,7 @@ public class TransportPendingClusterTasksAction extends TransportMasterNodeReadA PendingClusterTasksRequest, PendingClusterTasksResponse> { - public static final ActionType TYPE = new ActionType<>( - "cluster:monitor/task", - PendingClusterTasksResponse::new - ); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/task"); private static final Logger logger = LogManager.getLogger(TransportPendingClusterTasksAction.class); private final ClusterService clusterService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 367837fa91296..e56be8852e7df 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -59,7 +59,7 @@ public class TransportIndicesAliasesAction extends AcknowledgedTransportMasterNodeAction { public static final String NAME = "indices:admin/aliases"; - public static final ActionType TYPE = ActionType.localOnly(NAME); + public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportIndicesAliasesAction.class); private final MetadataIndexAliasesService indexAliasesService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java index a04c7c2c2af60..a033abfd0a107 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.indices.alias.get; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public class GetAliasesAction extends ActionType { @@ -17,6 +16,6 @@ public class GetAliasesAction extends ActionType { public static final String NAME = "indices:admin/aliases/get"; private GetAliasesAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index 9b9fb49c1bbe0..3e8e6fbfde75c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.indices.SystemIndices; @@ -147,21 +146,9 @@ static Map> postProcess( ClusterState state ) { Map> result = new HashMap<>(); - boolean noAliasesSpecified = request.getOriginalAliases() == null || request.getOriginalAliases().length == 0; List requestedDataStreams = resolver.dataStreamNames(state, request.indicesOptions(), request.indices()); - for (String requestedDataStream : requestedDataStreams) { - List aliases = state.metadata() - .dataStreamAliases() - .values() - .stream() - .filter(alias -> alias.getDataStreams().contains(requestedDataStream)) - .filter(alias -> noAliasesSpecified || Regex.simpleMatch(request.aliases(), alias.getName())) - .toList(); - if (aliases.isEmpty() == false) { - result.put(requestedDataStream, aliases); - } - } - return result; + + return state.metadata().findDataStreamAliases(request.aliases(), requestedDataStreams.toArray(new String[0])); } private static void checkSystemIndexAccess( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 6c5a271c3338b..d19aacb306414 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -40,7 +40,7 @@ public class AnalyzeAction extends ActionType { public static final String NAME = "indices:admin/analyze"; private AnalyzeAction() { - super(NAME, 
AnalyzeAction.Response::new); + super(NAME); } public static class Fields { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java index e2894f072011c..be33fada9c934 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java @@ -25,7 +25,7 @@ public class ReloadAnalyzersRequest extends BroadcastRequest { - public static final ActionType TYPE = new ActionType<>( - "indices:admin/reload_analyzers", - ReloadAnalyzersResponse::new - ); + public static final ActionType TYPE = new ActionType<>("indices:admin/reload_analyzers"); private static final Logger logger = LogManager.getLogger(TransportReloadAnalyzersAction.class); private final IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java index 79a70969edaaf..74184598c6db2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java @@ -9,13 +9,14 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class ClearIndicesCacheAction extends ActionType { +public class ClearIndicesCacheAction extends ActionType { public static final ClearIndicesCacheAction INSTANCE = new ClearIndicesCacheAction(); public static final String NAME = "indices:admin/cache/clear"; private ClearIndicesCacheAction() { - super(NAME, ClearIndicesCacheResponse::new); + 
super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java index 464c22d1119b0..fb6139c0ae4e3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java @@ -9,11 +9,12 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; public class ClearIndicesCacheRequestBuilder extends BroadcastOperationRequestBuilder< ClearIndicesCacheRequest, - ClearIndicesCacheResponse, + BroadcastResponse, ClearIndicesCacheRequestBuilder> { public ClearIndicesCacheRequestBuilder(ElasticsearchClient client) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java deleted file mode 100644 index df0a298c87eeb..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.cache.clear; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * The response of a clear cache action. - */ -public class ClearIndicesCacheResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "clear_cache", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new ClearIndicesCacheResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - } - ); - - static { - declareBroadcastFields(PARSER); - } - - ClearIndicesCacheResponse(StreamInput in) throws IOException { - super(in); - } - - ClearIndicesCacheResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static ClearIndicesCacheResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index 86f0093598744..faeaf0bdb575a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -32,7 +33,7 @@ */ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAction< ClearIndicesCacheRequest, - ClearIndicesCacheResponse, + BroadcastResponse, TransportBroadcastByNodeAction.EmptyResult> { private final IndicesService indicesService; @@ -64,11 +65,11 @@ protected EmptyResult readShardResult(StreamInput in) throws IOException { } @Override - protected ResponseFactory getResponseFactory( + protected ResponseFactory getResponseFactory( ClearIndicesCacheRequest request, ClusterState clusterState ) { - return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new ClearIndicesCacheResponse( + return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new BroadcastResponse( totalShards, successfulShards, failedShards, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index f4a65a2b6490f..5fe1ba459ba93 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -41,7 +41,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction { public static final String NAME = "indices:admin/close"; - public static final ActionType TYPE = new ActionType<>(NAME, 
CloseIndexResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportCloseIndexAction.class); private final MetadataIndexStateService indexStateService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java index c5b5602a963d4..ac2f437f7225a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java @@ -42,7 +42,7 @@ public class TransportVerifyShardBeforeCloseAction extends TransportReplicationA ReplicationResponse> { public static final String NAME = TransportCloseIndexAction.NAME + "[s]"; - public static final ActionType TYPE = new ActionType<>(NAME, ReplicationResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportVerifyShardBeforeCloseAction.class); @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index 87334afa3ed8a..98848f041cea2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -69,7 +69,7 @@ public final class AutoCreateAction extends ActionType { public static final String NAME = "indices:admin/auto_create"; private AutoCreateAction() { - super(NAME, CreateIndexResponse::new); + super(NAME); } public static final class TransportAction extends TransportMasterNodeAction { @@ -244,7 +244,8 @@ ClusterState execute( // This expression 
only evaluates to true when the argument is non-null and false if (isSystemDataStream == false && Boolean.FALSE.equals(template.getAllowAutoCreate())) { throw new IndexNotFoundException( - "composable template " + template.indexPatterns() + " forbids index auto creation" + "composable template " + template.indexPatterns() + " forbids index auto creation", + request.index() ); } @@ -272,6 +273,13 @@ ClusterState execute( successfulRequests.put(request, indexNames); return clusterState; } else { + if (request.isRequireDataStream()) { + throw new IndexNotFoundException( + "the index creation request requires a data stream, " + + "but no matching index template with data stream template was found for it", + request.index() + ); + } final var indexName = IndexNameExpressionResolver.resolveDateMathExpression(request.index()); if (isSystemIndex) { if (indexName.equals(request.index()) == false) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java index e22a8484ff381..5560c44f3fcbe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java @@ -16,7 +16,7 @@ public class CreateIndexAction extends ActionType { public static final String NAME = "indices:admin/create"; private CreateIndexAction() { - super(NAME, CreateIndexResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 136f261dc3ef3..2ec6db339b6ef 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -62,6 +62,8 
@@ public class CreateIndexRequest extends AcknowledgedRequest private String index; + private boolean requireDataStream; + private Settings settings = Settings.EMPTY; private String mappings = "{}"; @@ -102,6 +104,11 @@ public CreateIndexRequest(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { origin = in.readString(); } + if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + requireDataStream = in.readBoolean(); + } else { + requireDataStream = false; + } } public CreateIndexRequest() {} @@ -446,6 +453,18 @@ public CreateIndexRequest waitForActiveShards(final int waitForActiveShards) { return waitForActiveShards(ActiveShardCount.from(waitForActiveShards)); } + public boolean isRequireDataStream() { + return requireDataStream; + } + + /** + * Set whether this CreateIndexRequest requires a data stream. The data stream may be pre-existing or to-be-created. + */ + public CreateIndexRequest requireDataStream(boolean requireDataStream) { + this.requireDataStream = requireDataStream; + return this; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -468,6 +487,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { out.writeString(origin); } + if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + out.writeOptionalBoolean(this.requireDataStream); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 7052d4b1356ac..307cafbb9b8e1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ 
-246,4 +246,12 @@ public CreateIndexRequestBuilder setWaitForActiveShards(ActiveShardCount waitFor public CreateIndexRequestBuilder setWaitForActiveShards(final int waitForActiveShards) { return setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards)); } + + /** + * Set whether this request requires a data stream. The data stream may be pre-existing or to-be-created. + */ + public CreateIndexRequestBuilder setRequireDataStream(final boolean requireDataStream) { + request.requireDataStream(requireDataStream); + return this; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java index 3429457dd7e0f..f0596d061aeb3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java @@ -46,7 +46,7 @@ protected static void declareFields(Constructing private final String index; - protected CreateIndexResponse(StreamInput in) throws IOException { + public CreateIndexResponse(StreamInput in) throws IOException { super(in, true); index = in.readString(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java index 069de4b15501a..b8206cba8de2a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.indices.dangling.delete; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -36,11 +35,6 @@ public DeleteDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { this.acceptDataLoss = acceptDataLoss; } - @Override - public ActionRequestValidationException validate() { - return null; - } - public String getIndexUUID() { return indexUUID; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java index 954e42fc0d7ef..95b1410e16565 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java @@ -49,7 +49,7 @@ * to add the index to the index graveyard. */ public class TransportDeleteDanglingIndexAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/indices/dangling/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/indices/dangling/delete"); private static final Logger logger = LogManager.getLogger(TransportDeleteDanglingIndexAction.class); private final Settings settings; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java index e3178c4b7fc30..c39f757887e53 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java @@ -35,7 +35,7 @@ public class TransportFindDanglingIndexAction extends 
TransportNodesAction< NodeFindDanglingIndexRequest, NodeFindDanglingIndexResponse> { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/indices/dangling/find"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/indices/dangling/find"); private final TransportService transportService; private final DanglingIndicesState danglingIndicesState; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java index 2ff8e88445fc7..66378ab9907d8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.indices.dangling.import_index; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,11 +31,6 @@ public ImportDanglingIndexRequest(StreamInput in) throws IOException { this.acceptDataLoss = in.readBoolean(); } - @Override - public ActionRequestValidationException validate() { - return null; - } - public ImportDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { super(); this.indexUUID = Objects.requireNonNull(indexUUID, "indexUUID cannot be null"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java index 59bf71a4387e1..d3957be682cfd 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java @@ -39,7 +39,7 @@ * to perform the actual allocation. */ public class TransportImportDanglingIndexAction extends HandledTransportAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/indices/dangling/import"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/indices/dangling/import"); private static final Logger logger = LogManager.getLogger(TransportImportDanglingIndexAction.class); private final LocalAllocateDangledIndices danglingIndexAllocator; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java index 7baa190e3899d..e347874599ff4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java @@ -37,7 +37,7 @@ public class TransportListDanglingIndicesAction extends TransportNodesAction< NodeListDanglingIndicesRequest, NodeListDanglingIndicesResponse> { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/indices/dangling/list"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/indices/dangling/list"); private final TransportService transportService; private final DanglingIndicesState danglingIndicesState; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index c980e35c00e44..1eb8612d16da3 
100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -37,7 +37,7 @@ */ public class TransportDeleteIndexAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/delete"); + public static final ActionType TYPE = new ActionType<>("indices:admin/delete"); private static final Logger logger = LogManager.getLogger(TransportDeleteIndexAction.class); private final MetadataDeleteIndexService deleteIndexService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java index ee6d7b14f1930..710bf5077b73d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java @@ -15,6 +15,6 @@ public class AnalyzeIndexDiskUsageAction extends ActionType { +public class FlushAction extends ActionType { public static final FlushAction INSTANCE = new FlushAction(); public static final String NAME = "indices:admin/flush"; private FlushAction() { - super(NAME, FlushResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java index 64485ad0d4496..fc326f804ce8a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java @@ -23,7 +23,6 @@ * memory heuristics in order to automatically trigger flush operations as required in order to clear memory. 
* * @see org.elasticsearch.client.internal.IndicesAdminClient#flush(FlushRequest) - * @see FlushResponse */ public class FlushRequest extends BroadcastRequest { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java index 4e474732e3bad..f23e247428698 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java @@ -9,9 +9,10 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; -public class FlushRequestBuilder extends BroadcastOperationRequestBuilder { +public class FlushRequestBuilder extends BroadcastOperationRequestBuilder { public FlushRequestBuilder(ElasticsearchClient client) { super(client, FlushAction.INSTANCE, new FlushRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java deleted file mode 100644 index 0a037ebe09f8a..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.flush; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * A response to flush action. - */ -public class FlushResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("flush", true, arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new FlushResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - }); - - static { - declareBroadcastFields(PARSER); - } - - FlushResponse(StreamInput in) throws IOException { - super(in); - } - - FlushResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static FlushResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index ade775db9c755..96b4a0191b10c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -28,7 +29,7 @@ */ public class TransportFlushAction extends TransportBroadcastReplicationAction< FlushRequest, - FlushResponse, + BroadcastResponse, ShardFlushRequest, ReplicationResponse> { @@ -59,12 +60,12 @@ protected ShardFlushRequest newShardRequest(FlushRequest request, ShardId shardI } @Override - protected FlushResponse newResponse( + protected BroadcastResponse newResponse( int successfulShards, int failedShards, int totalNumCopies, List shardFailures ) { - return new FlushResponse(totalNumCopies, successfulShards, failedShards, shardFailures); + return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java index 29af167679451..74ae53f7ac9de 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java @@ -35,7 +35,7 @@ public class TransportShardFlushAction extends TransportReplicationAction { public static final String NAME = FlushAction.NAME + "[s]"; - public static final ActionType TYPE = new ActionType<>(NAME, ReplicationResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); @Inject public TransportShardFlushAction( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java index 3ab30298a57f5..df6a42a67ae54 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java @@ -9,13 +9,14 @@ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class ForceMergeAction extends ActionType { +public class ForceMergeAction extends ActionType { public static final ForceMergeAction INSTANCE = new ForceMergeAction(); public static final String NAME = "indices:admin/forcemerge"; private ForceMergeAction() { - super(NAME, ForceMergeResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java index 241f1a0c7fbf6..37075dd896b80 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java @@ -30,7 +30,6 @@ * to execute, and if so, executes it * * @see org.elasticsearch.client.internal.IndicesAdminClient#forceMerge(ForceMergeRequest) - * @see ForceMergeResponse */ public class ForceMergeRequest extends BroadcastRequest { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java index 835749751f4a6..d4c15ee799670 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.indices.forcemerge; import 
org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; /** @@ -20,7 +21,7 @@ */ public class ForceMergeRequestBuilder extends BroadcastOperationRequestBuilder< ForceMergeRequest, - ForceMergeResponse, + BroadcastResponse, ForceMergeRequestBuilder> { public ForceMergeRequestBuilder(ElasticsearchClient client) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java deleted file mode 100644 index 3853a944e8676..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.forcemerge; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * A response for force merge action. 
- */ -public class ForceMergeResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "force_merge", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new ForceMergeResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - } - ); - - static { - declareBroadcastFields(PARSER); - } - - ForceMergeResponse(StreamInput in) throws IOException { - super(in); - } - - public ForceMergeResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static ForceMergeResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java index a70498695e149..df98e8f12f18e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -35,7 +36,7 @@ */ public class TransportForceMergeAction extends TransportBroadcastByNodeAction< ForceMergeRequest, - ForceMergeResponse, + BroadcastResponse, 
TransportBroadcastByNodeAction.EmptyResult> { private final IndicesService indicesService; @@ -68,8 +69,8 @@ protected EmptyResult readShardResult(StreamInput in) throws IOException { } @Override - protected ResponseFactory getResponseFactory(ForceMergeRequest request, ClusterState clusterState) { - return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new ForceMergeResponse( + protected ResponseFactory getResponseFactory(ForceMergeRequest request, ClusterState clusterState) { + return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new BroadcastResponse( totalShards, successfulShards, failedShards, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java index 455df986b4733..ea2671eafe01f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java @@ -9,13 +9,15 @@ package org.elasticsearch.action.admin.indices.get; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; public class GetIndexAction extends ActionType { public static final GetIndexAction INSTANCE = new GetIndexAction(); public static final String NAME = "indices:admin/get"; + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>(NAME, GetIndexResponse::new); private GetIndexAction() { - super(NAME, GetIndexResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java index 6b7a80e2d59ca..0245457ac7588 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java @@ -16,7 +16,7 @@ public class GetFieldMappingsAction extends ActionType public static final String NAME = "indices:admin/mappings/fields/get"; private GetFieldMappingsAction() { - super(NAME, GetFieldMappingsResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java index e96440864fda0..2e5976f8239d5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java @@ -16,6 +16,6 @@ public class GetMappingsAction extends ActionType { public static final String NAME = "indices:admin/mappings/get"; private GetMappingsAction() { - super(NAME, GetMappingsResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 349391e425212..28c01198f516f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -50,7 +50,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc GetFieldMappingsResponse> { private static final String ACTION_NAME = GetFieldMappingsAction.NAME + "[index]"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, GetFieldMappingsResponse::new); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); protected final ClusterService clusterService; private final 
IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java index 635412c8be2e8..c8f80eae99a3d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java @@ -19,6 +19,7 @@ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpdateRequest { private final CompressedXContent source; + private boolean autoUpdate; public PutMappingClusterStateUpdateRequest(String source) throws IOException { this.source = CompressedXContent.fromJSON(source); @@ -28,4 +29,12 @@ public CompressedXContent source() { return source; } + public PutMappingClusterStateUpdateRequest autoUpdate(boolean autoUpdate) { + this.autoUpdate = autoUpdate; + return this; + } + + public boolean autoUpdate() { + return autoUpdate; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportAutoPutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportAutoPutMappingAction.java index 0921bdcfe11a2..9c3b08ef49add 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportAutoPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportAutoPutMappingAction.java @@ -32,7 +32,7 @@ public class TransportAutoPutMappingAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/mapping/auto_put"); + public static final ActionType TYPE = new ActionType<>("indices:admin/mapping/auto_put"); private static final Logger logger = 
LogManager.getLogger(TransportAutoPutMappingAction.class); private final MetadataMappingService metadataMappingService; @@ -93,7 +93,7 @@ protected void masterOperation( return; } - performMappingUpdate(concreteIndices, request, listener, metadataMappingService); + performMappingUpdate(concreteIndices, request, listener, metadataMappingService, true); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index 489ac9a378254..e93bc1501744d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -45,7 +45,7 @@ */ public class TransportPutMappingAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/mapping/put"); + public static final ActionType TYPE = new ActionType<>("indices:admin/mapping/put"); private static final Logger logger = LogManager.getLogger(TransportPutMappingAction.class); private final MetadataMappingService metadataMappingService; @@ -112,7 +112,7 @@ protected void masterOperation( return; } - performMappingUpdate(concreteIndices, request, listener, metadataMappingService); + performMappingUpdate(concreteIndices, request, listener, metadataMappingService, false); } catch (IndexNotFoundException ex) { logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(request.indices() + "]"), ex); throw ex; @@ -147,7 +147,8 @@ static void performMappingUpdate( Index[] concreteIndices, PutMappingRequest request, ActionListener listener, - MetadataMappingService metadataMappingService + MetadataMappingService metadataMappingService, + boolean autoUpdate ) { final ActionListener wrappedListener = listener.delegateResponse((l, e) -> { 
logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(concreteIndices) + "]", e); @@ -157,7 +158,8 @@ static void performMappingUpdate( try { updateRequest = new PutMappingClusterStateUpdateRequest(request.source()).indices(concreteIndices) .ackTimeout(request.timeout()) - .masterNodeTimeout(request.masterNodeTimeout()); + .masterNodeTimeout(request.masterNodeTimeout()) + .autoUpdate(autoUpdate); } catch (IOException e) { wrappedListener.onFailure(e); return; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java index 959ce867949c2..8fb772496baf7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java @@ -16,7 +16,7 @@ public class OpenIndexAction extends ActionType { public static final String NAME = "indices:admin/open"; private OpenIndexAction() { - super(NAME, OpenIndexResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java index 630230437fe91..460be3cf10c1c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java @@ -16,6 +16,6 @@ public class AddIndexBlockAction extends ActionType { public static final String NAME = "indices:admin/block/add"; private AddIndexBlockAction() { - super(NAME, AddIndexBlockResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java index 7daf04f41a9fb..ac590d1a4d826 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java @@ -45,7 +45,7 @@ public class TransportVerifyShardIndexBlockAction extends TransportReplicationAc ReplicationResponse> { public static final String NAME = AddIndexBlockAction.NAME + "[s]"; - public static final ActionType TYPE = new ActionType<>(NAME, ReplicationResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); @Inject public TransportVerifyShardIndexBlockAction( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java index 6b3f22703d223..0f1ad96a6ad0f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java @@ -19,6 +19,6 @@ public class RecoveryAction extends ActionType { public static final String NAME = "indices:monitor/recovery"; private RecoveryAction() { - super(NAME, RecoveryResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java index 7d9ca67b9fa9e..d484346e1b8fc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java @@ -9,13 +9,14 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.ActionType; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class RefreshAction extends ActionType { +public class RefreshAction extends ActionType { public static final RefreshAction INSTANCE = new RefreshAction(); public static final String NAME = "indices:admin/refresh"; private RefreshAction() { - super(NAME, RefreshResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java index d0f9e99fd08ec..1f703e59980d6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java @@ -19,7 +19,6 @@ * default a refresh is scheduled periodically. * * @see org.elasticsearch.client.internal.IndicesAdminClient#refresh(RefreshRequest) - * @see RefreshResponse */ public class RefreshRequest extends BroadcastRequest { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java index 51d569dac0c30..c503ff6ca6930 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; /** @@ -16,7 +17,7 @@ * capabilities depends on the index engine used. For example, the internal one requires refresh to be called, but by * default a refresh is scheduled periodically. 
*/ -public class RefreshRequestBuilder extends BroadcastOperationRequestBuilder { +public class RefreshRequestBuilder extends BroadcastOperationRequestBuilder { public RefreshRequestBuilder(ElasticsearchClient client) { super(client, RefreshAction.INSTANCE, new RefreshRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java deleted file mode 100644 index 5669591a17dc7..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.refresh; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * The response of a refresh action. 
- */ -public class RefreshResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("refresh", true, arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new RefreshResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - }); - - static { - declareBroadcastFields(PARSER); - } - - RefreshResponse(StreamInput in) throws IOException { - super(in); - } - - public RefreshResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static RefreshResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index 7537e74e2c780..5d6f60216ae05 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; @@ -29,7 +30,7 @@ */ public class TransportRefreshAction extends TransportBroadcastReplicationAction< RefreshRequest, - RefreshResponse, + BroadcastResponse, 
BasicReplicationRequest, ReplicationResponse> { @@ -62,12 +63,12 @@ protected BasicReplicationRequest newShardRequest(RefreshRequest request, ShardI } @Override - protected RefreshResponse newResponse( + protected BroadcastResponse newResponse( int successfulShards, int failedShards, int totalNumCopies, List shardFailures ) { - return new RefreshResponse(totalNumCopies, successfulShards, failedShards, shardFailures); + return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index f4c72d7e37f3d..b3e6385e7099d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -42,7 +42,7 @@ public class TransportShardRefreshAction extends TransportReplicationAction< private static final Logger logger = LogManager.getLogger(TransportShardRefreshAction.class); public static final String NAME = RefreshAction.NAME + "[s]"; - public static final ActionType TYPE = new ActionType<>(NAME, ReplicationResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); public static final String SOURCE_API = "api"; private final Executor refreshExecutor; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index 1590adeac061d..aaa4901f74fe7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -15,10 +15,10 @@ import org.elasticsearch.action.ActionType; import 
org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -65,9 +65,10 @@ public class ResolveIndexAction extends ActionType public static final ResolveIndexAction INSTANCE = new ResolveIndexAction(); public static final String NAME = "indices:admin/resolve/index"; + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>(NAME, Response::new); private ResolveIndexAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest implements IndicesRequest.Replaceable { @@ -493,13 +494,12 @@ protected void doExecute(Task task, Request request, final ActionListener remoteIndices : remoteClusterIndices.entrySet()) { String clusterAlias = remoteIndices.getKey(); OriginalIndices originalIndices = remoteIndices.getValue(); - Client remoteClusterClient = remoteClusterService.getRemoteClusterClient( - threadPool, + var remoteClusterClient = remoteClusterService.getRemoteClusterClient( clusterAlias, EsExecutors.DIRECT_EXECUTOR_SERVICE ); Request remoteRequest = new Request(originalIndices.indices(), originalIndices.indicesOptions()); - remoteClusterClient.admin().indices().resolveIndex(remoteRequest, ActionListener.wrap(response -> { + remoteClusterClient.execute(ResolveIndexAction.REMOTE_TYPE, remoteRequest, ActionListener.wrap(response -> { remoteResponses.put(clusterAlias, response); terminalHandler.run(); }, failure -> terminalHandler.run())); diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java index 8ae13f510184a..84ca69bcde77d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java @@ -16,7 +16,7 @@ public class RolloverAction extends ActionType { public static final String NAME = "indices:admin/rollover"; private RolloverAction() { - super(NAME, RolloverResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java index b170cafae98a8..86df6b6d9a755 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java @@ -16,6 +16,6 @@ public class IndicesSegmentsAction extends ActionType { public static final String NAME = "indices:monitor/segments"; private IndicesSegmentsAction() { - super(NAME, IndicesSegmentResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java index 4fe42fff45011..4522519952ea8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java @@ -16,6 +16,6 @@ public class GetSettingsAction extends ActionType { public static final String NAME = "indices:monitor/settings/get"; public GetSettingsAction() { - super(NAME, GetSettingsResponse::new); + super(NAME); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java index ec3a5f71c3a48..8a106d1b43d3e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java @@ -15,15 +15,12 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Objects; @@ -91,61 +88,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(indexToDefaultSettings, StreamOutput::writeWriteable); } - private static void parseSettingsField( - XContentParser parser, - String currentIndexName, - Map indexToSettings, - Map indexToDefaultSettings - ) throws IOException { - - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - switch (parser.currentName()) { - case "settings" -> indexToSettings.put(currentIndexName, Settings.fromXContent(parser)); - case "defaults" -> indexToDefaultSettings.put(currentIndexName, Settings.fromXContent(parser)); - default -> parser.skipChildren(); - } - } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { - parser.skipChildren(); - } - parser.nextToken(); - } - - private static void parseIndexEntry( - XContentParser parser, - Map indexToSettings, - Map indexToDefaultSettings - ) throws IOException { 
- String indexName = parser.currentName(); - parser.nextToken(); - while (parser.isClosed() == false && parser.currentToken() != XContentParser.Token.END_OBJECT) { - parseSettingsField(parser, indexName, indexToSettings, indexToDefaultSettings); - } - } - - public static GetSettingsResponse fromXContent(XContentParser parser) throws IOException { - HashMap indexToSettings = new HashMap<>(); - HashMap indexToDefaultSettings = new HashMap<>(); - - if (parser.currentToken() == null) { - parser.nextToken(); - } - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - parser.nextToken(); - - while (parser.isClosed() == false) { - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - // we must assume this is an index entry - parseIndexEntry(parser, indexToSettings, indexToDefaultSettings); - } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { - parser.skipChildren(); - } else { - parser.nextToken(); - } - } - - return new GetSettingsResponse(Map.copyOf(indexToSettings), Map.copyOf(indexToDefaultSettings)); - } - @Override public String toString() { try { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index 3d1a8dad9d1bc..164c403d1b516 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -45,7 +45,7 @@ public class TransportUpdateSettingsAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/settings/update"); + public static final ActionType TYPE = new ActionType<>("indices:admin/settings/update"); private static final Logger logger = 
LogManager.getLogger(TransportUpdateSettingsAction.class); private final MetadataUpdateSettingsService updateSettingsService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 7d091d8278ab7..4b1c5b3f58dd5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -63,7 +63,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc IndicesShardStoresRequest, IndicesShardStoresResponse> { - public static final ActionType TYPE = ActionType.localOnly("indices:monitor/shard_stores"); + public static final ActionType TYPE = new ActionType<>("indices:monitor/shard_stores"); private static final Logger logger = LogManager.getLogger(TransportIndicesShardStoresAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java index dc26e0380fe72..e89ccac18ff2f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java @@ -9,14 +9,15 @@ package org.elasticsearch.action.admin.indices.shrink; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -public class ResizeAction extends ActionType { +public class ResizeAction extends ActionType { public static final ResizeAction INSTANCE = new ResizeAction(); public static final String NAME = "indices:admin/resize"; private ResizeAction() { - super(NAME, ResizeResponse::new); + super(NAME); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index 71270cd61b9ed..c39d2e1114618 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -152,7 +153,7 @@ public String getSourceIndex() { * non-negative integer, up to the number of copies per shard (number of replicas + 1), * to wait for the desired amount of shard copies to become active before returning. * Index creation will only wait up until the timeout value for the number of shard copies - * to be active before returning. Check {@link ResizeResponse#isShardsAcknowledged()} to + * to be active before returning. Check {@link CreateIndexResponse#isShardsAcknowledged()} to * determine if the requisite shard copies were all started before returning or timing out. 
* * @param waitForActiveShards number of active shard copies to wait on diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java index a4972d1a98e7d..a18de15037e49 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java @@ -8,13 +8,14 @@ package org.elasticsearch.action.admin.indices.shrink; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -public class ResizeRequestBuilder extends AcknowledgedRequestBuilder { +public class ResizeRequestBuilder extends AcknowledgedRequestBuilder { public ResizeRequestBuilder(ElasticsearchClient client) { super(client, ResizeAction.INSTANCE, new ResizeRequest()); } @@ -43,7 +44,7 @@ public ResizeRequestBuilder setSettings(Settings settings) { * non-negative integer, up to the number of copies per shard (number of replicas + 1), * to wait for the desired amount of shard copies to become active before returning. * Index creation will only wait up until the timeout value for the number of shard copies - * to be active before returning. Check {@link ResizeResponse#isShardsAcknowledged()} to + * to be active before returning. Check {@link CreateIndexResponse#isShardsAcknowledged()} to * determine if the requisite shard copies were all started before returning or timing out. 
* * @param waitForActiveShards number of active shard copies to wait on diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponse.java deleted file mode 100644 index 768fc18397519..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponse.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.shrink; - -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -/** - * A response for a resize index action, either shrink or split index. 
- */ -public final class ResizeResponse extends CreateIndexResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "resize_index", - true, - args -> new ResizeResponse((boolean) args[0], (boolean) args[1], (String) args[2]) - ); - - static { - declareFields(PARSER); - } - - ResizeResponse(StreamInput in) throws IOException { - super(in); - } - - public ResizeResponse(boolean acknowledged, boolean shardsAcknowledged, String index) { - super(acknowledged, shardsAcknowledged, index); - } - - public static ResizeResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java index 8ce69309cf59d..129c07b64fd4d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java @@ -9,14 +9,15 @@ package org.elasticsearch.action.admin.indices.shrink; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -public class ShrinkAction extends ActionType { +public class ShrinkAction extends ActionType { public static final ShrinkAction INSTANCE = new ShrinkAction(); public static final String NAME = "indices:admin/shrink"; private ShrinkAction() { - super(NAME, ResizeResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java index 5686deb6b804a..fbae64dcb6d45 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java @@ 
-11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; @@ -41,7 +42,7 @@ /** * Main class to initiate resizing (shrink / split) an index into a new index */ -public class TransportResizeAction extends TransportMasterNodeAction { +public class TransportResizeAction extends TransportMasterNodeAction { private final MetadataCreateIndexService createIndexService; private final Client client; @@ -86,7 +87,7 @@ protected TransportResizeAction( actionFilters, ResizeRequest::new, indexNameExpressionResolver, - ResizeResponse::new, + CreateIndexResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.createIndexService = createIndexService; @@ -103,7 +104,7 @@ protected void masterOperation( Task task, final ResizeRequest resizeRequest, final ClusterState state, - final ActionListener listener + final ActionListener listener ) { // there is no need to fetch docs stats for split but we keep it simple and do it anyway for simplicity of the code @@ -136,7 +137,11 @@ protected void masterOperation( createIndexService.createIndex( updateRequest, delegatedListener.map( - response -> new ResizeResponse(response.isAcknowledged(), response.isShardsAcknowledged(), updateRequest.index()) + response -> new CreateIndexResponse( + response.isAcknowledged(), + response.isShardsAcknowledged(), + updateRequest.index() + ) ) ); }) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index 
d0da715b17168..b6345ed0fce4a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -48,7 +48,7 @@ public class CommonStats implements Writeable, ToXContentFragment { private static final TransportVersion VERSION_SUPPORTING_NODE_MAPPINGS = TransportVersions.V_8_5_0; - private static final TransportVersion VERSION_SUPPORTING_DENSE_VECTOR_STATS = TransportVersions.V_8_500_061; + private static final TransportVersion VERSION_SUPPORTING_DENSE_VECTOR_STATS = TransportVersions.V_8_10_X; @Nullable public DocsStats docs; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardRequest.java deleted file mode 100644 index 1c3f9672f712c..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardRequest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.stats; - -import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.tasks.CancellableTask; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Map; - -public class FieldUsageShardRequest extends BroadcastShardRequest { - - private final String[] fields; - - FieldUsageShardRequest(ShardId shardId, FieldUsageStatsRequest request) { - super(shardId, request); - this.fields = request.fields(); - } - - FieldUsageShardRequest(StreamInput in) throws IOException { - super(in); - this.fields = in.readStringArray(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(fields); - } - - @Override - public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new CancellableTask(id, type, action, "", parentTaskId, headers) { - @Override - public String getDescription() { - return FieldUsageShardRequest.this.getDescription(); - } - }; - } - - @Override - public String getDescription() { - return "get field usage for shard: [" + shardId() + "], fields: " + Arrays.toString(fields); - } - - public String[] fields() { - return fields; - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsAction.java index 825b66f63d812..e7d55aba26605 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsAction.java @@ -16,6 +16,6 @@ public class 
FieldUsageStatsAction extends ActionType { public static final String NAME = "indices:monitor/field_usage_stats"; private FieldUsageStatsAction() { - super(NAME, FieldUsageStatsResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java index c1e006b3a88a7..08ed9df5d2455 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java @@ -9,13 +9,18 @@ package org.elasticsearch.action.admin.indices.stats; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; public class IndicesStatsAction extends ActionType { public static final IndicesStatsAction INSTANCE = new IndicesStatsAction(); public static final String NAME = "indices:monitor/stats"; + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + IndicesStatsResponse::new + ); private IndicesStatsAction() { - super(NAME, IndicesStatsResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index f90dc894f1b57..477a0bd910719 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -61,7 +61,7 @@ public ShardStats(StreamInput in) throws IOException { isCustomDataPath = in.readBoolean(); seqNoStats = in.readOptionalWriteable(SeqNoStats::new); retentionLeaseStats = in.readOptionalWriteable(RetentionLeaseStats::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { isSearchIdle = in.readBoolean(); searchIdleTime = in.readVLong(); } else { @@ -215,7 +215,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(isCustomDataPath); out.writeOptionalWriteable(seqNoStats); out.writeOptionalWriteable(retentionLeaseStats); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(isSearchIdle); out.writeVLong(searchIdleTime); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java index 9b6f0ad9a66c1..593162305f2d0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java @@ -43,7 +43,7 @@ public class TransportDeleteComponentTemplateAction extends AcknowledgedTransportMasterNodeAction< TransportDeleteComponentTemplateAction.Request> { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/component_template/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/component_template/delete"); private final MetadataIndexTemplateService indexTemplateService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java index 5eada4dd6ace9..f884c8404d0f2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java @@ -43,7 +43,7 @@ public class TransportDeleteComposableIndexTemplateAction extends AcknowledgedTransportMasterNodeAction< TransportDeleteComposableIndexTemplateAction.Request> { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/index_template/delete"); + public static final ActionType TYPE = new ActionType<>("indices:admin/index_template/delete"); private final MetadataIndexTemplateService indexTemplateService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java index 066d0999dd81a..0a7781bf044dd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java @@ -31,7 +31,7 @@ */ public class TransportDeleteIndexTemplateAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/template/delete"); + public static final ActionType TYPE = new ActionType<>("indices:admin/template/delete"); private static final Logger logger = LogManager.getLogger(TransportDeleteIndexTemplateAction.class); private final MetadataIndexTemplateService indexTemplateService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index ae73904a8447b..7d2dad80bf35a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -35,7 +35,7 @@ public class GetComponentTemplateAction extends ActionType indexTemplates() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(indexTemplates, StreamOutput::writeWriteable); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java index 4054d22d7c4fb..67198ccf0c528 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java @@ -15,6 +15,6 @@ public class GetIndexTemplatesAction extends ActionType { @@ -63,7 +63,7 @@ public Request(StreamInput in) throws IOException { super(in); templateName = in.readOptionalString(); indexTemplateRequest = in.readOptionalWriteable(TransportPutComposableIndexTemplateAction.Request::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { includeDefaults = in.readBoolean(); } } @@ -73,7 +73,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(templateName); out.writeOptionalWriteable(indexTemplateRequest); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java index 6d7302e127afe..56e7079ec38ba 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java @@ -31,7 +31,7 @@ public class PutComponentTemplateAction extends ActionType public static final String NAME = "cluster:admin/component_template/put"; private PutComponentTemplateAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java index 9155eac703632..8d259083a1352 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java @@ -52,7 +52,7 @@ public class TransportPutComposableIndexTemplateAction extends AcknowledgedTransportMasterNodeAction< TransportPutComposableIndexTemplateAction.Request> { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/index_template/put"); + public static final ActionType TYPE = new ActionType<>("indices:admin/index_template/put"); private final MetadataIndexTemplateService indexTemplateService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java index d1d701e63675f..c2b4e5136e556 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java @@ -37,7 +37,7 @@ */ public class TransportPutIndexTemplateAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("indices:admin/template/put"); + public static final ActionType TYPE = new ActionType<>("indices:admin/template/put"); private static final Logger logger = LogManager.getLogger(TransportPutIndexTemplateAction.class); private final MetadataIndexTemplateService indexTemplateService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java index 23757b120a80a..323be06664f4c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java @@ -16,6 +16,6 @@ public class ValidateQueryAction extends ActionType { public static final String NAME = "indices:admin/validate/query"; private ValidateQueryAction() { - super(NAME, ValidateQueryResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java index 70b010b6ca88d..bc72e039e6ded 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java @@ -9,20 +9,14 @@ package org.elasticsearch.action.bulk; import org.elasticsearch.action.ActionType; -import org.elasticsearch.transport.TransportRequestOptions; public class BulkAction extends ActionType { public static final BulkAction INSTANCE = new BulkAction(); public 
static final String NAME = "indices:data/write/bulk"; - private static final TransportRequestOptions TRANSPORT_REQUEST_OPTIONS = TransportRequestOptions.of( - null, - TransportRequestOptions.Type.BULK - ); - private BulkAction() { - super(NAME, BulkResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index c2b6c666d829a..151e8795d0f82 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -21,26 +21,17 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * Represents a single item response for an action executed as part of the bulk API. 
Holds the index/type/id * of the relevant action, and if it has failed or not (with the failure message in case it failed). @@ -49,8 +40,8 @@ public class BulkItemResponse implements Writeable, ToXContentObject { private static final String _INDEX = "_index"; private static final String _ID = "_id"; - private static final String STATUS = "status"; - private static final String ERROR = "error"; + static final String STATUS = "status"; + static final String ERROR = "error"; public RestStatus status() { return failure == null ? response.status() : failure.getStatus(); @@ -80,80 +71,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Reads a {@link BulkItemResponse} from a {@link XContentParser}. - * - * @param parser the {@link XContentParser} - * @param id the id to assign to the parsed {@link BulkItemResponse}. It is usually the index of - * the item in the {@link BulkResponse#getItems} array. - */ - public static BulkItemResponse fromXContent(XContentParser parser, int id) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - - final OpType opType = OpType.fromString(currentFieldName); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - - DocWriteResponse.Builder builder = null; - CheckedConsumer itemParser = null; - - if (opType == OpType.INDEX || opType == OpType.CREATE) { - final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder(); - builder = indexResponseBuilder; - itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder); - - } else if (opType == OpType.UPDATE) { - final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder(); - builder 
= updateResponseBuilder; - itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder); - - } else if (opType == OpType.DELETE) { - final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder(); - builder = deleteResponseBuilder; - itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder); - } else { - throwUnknownField(currentFieldName, parser); - } - - RestStatus status = null; - ElasticsearchException exception = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } - - if (ERROR.equals(currentFieldName)) { - if (token == XContentParser.Token.START_OBJECT) { - exception = ElasticsearchException.fromXContent(parser); - } - } else if (STATUS.equals(currentFieldName)) { - if (token == XContentParser.Token.VALUE_NUMBER) { - status = RestStatus.fromCode(parser.intValue()); - } - } else { - itemParser.accept(parser); - } - } - - ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser); - token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser); - - BulkItemResponse bulkItemResponse; - if (exception != null) { - Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getId(), exception, status); - bulkItemResponse = BulkItemResponse.failure(id, opType, failure); - } else { - bulkItemResponse = BulkItemResponse.success(id, opType, builder.build()); - } - return bulkItemResponse; - } - /** * Represents a failure. 
*/ @@ -171,18 +88,6 @@ public static class Failure implements Writeable, ToXContentFragment { private final long term; private final boolean aborted; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "bulk_failures", - true, - a -> new Failure((String) a[0], (String) a[1], (Exception) a[2], RestStatus.fromCode((int) a[3])) - ); - static { - PARSER.declareString(constructorArg(), new ParseField(INDEX_FIELD)); - PARSER.declareString(optionalConstructorArg(), new ParseField(ID_FIELD)); - PARSER.declareObject(constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), new ParseField(CAUSE_FIELD)); - PARSER.declareInt(constructorArg(), new ParseField(STATUS_FIELD)); - } - /** * For write failures before operation was assigned a sequence number. * diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java index 78df7fdc25542..28eef30f9185d 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java @@ -293,7 +293,7 @@ public void markOperationAsExecuted(Engine.Result result) { } executionResult = BulkItemResponse.success(current.id(), current.request().opType(), response); // set a blank ShardInfo so we can safely send it to the replicas. We won't use it in the real response though. 
- executionResult.getResponse().setShardInfo(new ReplicationResponse.ShardInfo()); + executionResult.getResponse().setShardInfo(ReplicationResponse.ShardInfo.EMPTY); locationToSync = TransportWriteAction.locationToSync(locationToSync, result.getTranslogLocation()); } case FAILURE -> { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 6d99f794e972a..7a2bd0e68608a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -438,7 +438,19 @@ public BulkProcessor add( lock.lock(); try { ensureOpen(); - bulkRequest.add(data, defaultIndex, null, null, defaultPipeline, null, null, true, xContentType, RestApiVersion.current()); + bulkRequest.add( + data, + defaultIndex, + null, + null, + defaultPipeline, + null, + null, + null, + true, + xContentType, + RestApiVersion.current() + ); bulkRequestToExecute = newBulkRequestIfNeeded(); } finally { lock.unlock(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index cbe4252a0b6a1..6998ca4150ad5 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -75,6 +75,7 @@ public class BulkRequest extends ActionRequest private String globalRouting; private String globalIndex; private Boolean globalRequireAlias; + private Boolean globalRequireDatsStream; private long sizeInBytes = 0; @@ -232,7 +233,7 @@ public BulkRequest add(byte[] data, int from, int length, @Nullable String defau * Adds a framed data in binary format */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, XContentType xContentType) throws IOException { - return add(data, defaultIndex, null, null, null, null, null, true, xContentType, 
RestApiVersion.current()); + return add(data, defaultIndex, null, null, null, null, null, null, true, xContentType, RestApiVersion.current()); } /** @@ -240,7 +241,7 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, XCont */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, boolean allowExplicitIndex, XContentType xContentType) throws IOException { - return add(data, defaultIndex, null, null, null, null, null, allowExplicitIndex, xContentType, RestApiVersion.current()); + return add(data, defaultIndex, null, null, null, null, null, null, allowExplicitIndex, xContentType, RestApiVersion.current()); } @@ -251,6 +252,7 @@ public BulkRequest add( @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String defaultPipeline, @Nullable Boolean defaultRequireAlias, + @Nullable Boolean defaultRequireDataStream, @Nullable Boolean defaultListExecutedPipelines, boolean allowExplicitIndex, XContentType xContentType, @@ -259,6 +261,7 @@ public BulkRequest add( String routing = valueOrDefault(defaultRouting, globalRouting); String pipeline = valueOrDefault(defaultPipeline, globalPipeline); Boolean requireAlias = valueOrDefault(defaultRequireAlias, globalRequireAlias); + Boolean requireDataStream = valueOrDefault(defaultRequireDataStream, globalRequireDatsStream); new BulkRequestParser(true, restApiVersion).parse( data, defaultIndex, @@ -266,6 +269,7 @@ public BulkRequest add( defaultFetchSourceContext, pipeline, requireAlias, + requireDataStream, defaultListExecutedPipelines, allowExplicitIndex, xContentType, @@ -374,6 +378,10 @@ public Boolean requireAlias() { return globalRequireAlias; } + public Boolean requireDataStream() { + return globalRequireDatsStream; + } + /** * Note for internal callers (NOT high level rest client), * the global parameter setting is ignored when used with: @@ -391,6 +399,11 @@ public BulkRequest requireAlias(Boolean globalRequireAlias) { return this; } + public BulkRequest 
requireDataStream(Boolean globalRequireDatsStream) { + this.globalRequireDatsStream = globalRequireDatsStream; + return this; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java index f1280587a0c55..5dccd1b55f554 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java @@ -61,6 +61,7 @@ public final class BulkRequestParser { private static final ParseField IF_SEQ_NO = new ParseField("if_seq_no"); private static final ParseField IF_PRIMARY_TERM = new ParseField("if_primary_term"); private static final ParseField REQUIRE_ALIAS = new ParseField(DocWriteRequest.REQUIRE_ALIAS); + private static final ParseField REQUIRE_DATA_STREAM = new ParseField(DocWriteRequest.REQUIRE_DATA_STREAM); private static final ParseField LIST_EXECUTED_PIPELINES = new ParseField(DocWriteRequest.LIST_EXECUTED_PIPELINES); private static final ParseField DYNAMIC_TEMPLATES = new ParseField("dynamic_templates"); @@ -127,6 +128,7 @@ public void parse( @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String defaultPipeline, @Nullable Boolean defaultRequireAlias, + @Nullable Boolean defaultRequireDataStream, @Nullable Boolean defaultListExecutedPipelines, boolean allowExplicitIndex, XContentType xContentType, @@ -209,6 +211,7 @@ public void parse( int retryOnConflict = 0; String pipeline = defaultPipeline; boolean requireAlias = defaultRequireAlias != null && defaultRequireAlias; + boolean requireDataStream = defaultRequireDataStream != null && defaultRequireDataStream; boolean listExecutedPipelines = defaultListExecutedPipelines != null && defaultListExecutedPipelines; Map dynamicTemplates = Map.of(); @@ -263,6 +266,8 @@ public void 
parse( fetchSourceContext = FetchSourceContext.fromXContent(parser); } else if (REQUIRE_ALIAS.match(currentFieldName, parser.getDeprecationHandler())) { requireAlias = parser.booleanValue(); + } else if (REQUIRE_DATA_STREAM.match(currentFieldName, parser.getDeprecationHandler())) { + requireDataStream = parser.booleanValue(); } else if (LIST_EXECUTED_PIPELINES.match(currentFieldName, parser.getDeprecationHandler())) { listExecutedPipelines = parser.booleanValue(); } else { @@ -349,6 +354,7 @@ public void parse( .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType) .setDynamicTemplates(dynamicTemplates) .setRequireAlias(requireAlias) + .setRequireDataStream(requireDataStream) .setListExecutedPipelines(listExecutedPipelines), type ); @@ -365,6 +371,7 @@ public void parse( .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType) .setDynamicTemplates(dynamicTemplates) .setRequireAlias(requireAlias) + .setRequireDataStream(requireDataStream) .setListExecutedPipelines(listExecutedPipelines), type ); @@ -382,6 +389,7 @@ public void parse( .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType) .setDynamicTemplates(dynamicTemplates) .setRequireAlias(requireAlias) + .setRequireDataStream(requireDataStream) .setListExecutedPipelines(listExecutedPipelines), type ); @@ -391,6 +399,12 @@ public void parse( "Update requests do not support versioning. 
" + "Please use `if_seq_no` and `if_primary_term` instead" ); } + if (requireDataStream) { + throw new IllegalArgumentException( + "Update requests do not support the `require_data_stream` flag, " + + "as data streams do not support update operations" + ); + } // TODO: support dynamic_templates in update requests if (dynamicTemplates.isEmpty() == false) { throw new IllegalArgumentException( diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index 0ce472520a4fd..111dbfb0f7af6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -12,31 +12,24 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.ArrayList; import java.util.Iterator; -import java.util.List; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; /** * A response of a bulk execution. Holding a response for each item responding (in order) of the * bulk requests. Each item holds the index/type/id is operated on, and if it failed or not (with the * failure message). 
*/ -public class BulkResponse extends ActionResponse implements Iterable, ToXContentObject { +public class BulkResponse extends ActionResponse implements Iterable, ChunkedToXContentObject { - private static final String ITEMS = "items"; - private static final String ERRORS = "errors"; - private static final String TOOK = "took"; - private static final String INGEST_TOOK = "ingest_took"; + static final String ITEMS = "items"; + static final String ERRORS = "errors"; + static final String TOOK = "took"; + static final String INGEST_TOOK = "ingest_took"; public static final long NO_INGEST_TOOK = -1L; @@ -134,54 +127,15 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ERRORS, hasFailures()); - builder.field(TOOK, tookInMillis); - if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { - builder.field(INGEST_TOOK, ingestTookInMillis); - } - builder.startArray(ITEMS); - for (BulkItemResponse item : this) { - item.toXContent(builder, params); - } - builder.endArray(); - builder.endObject(); - return builder; - } - - public static BulkResponse fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - - long took = -1L; - long ingestTook = NO_INGEST_TOOK; - List items = new ArrayList<>(); - - String currentFieldName = parser.currentName(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (TOOK.equals(currentFieldName)) { - took = parser.longValue(); - } else if (INGEST_TOOK.equals(currentFieldName)) { - ingestTook = parser.longValue(); - } else if (ERRORS.equals(currentFieldName) == false) { - throwUnknownField(currentFieldName, 
parser); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (ITEMS.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - items.add(BulkItemResponse.fromXContent(parser, items.size())); - } - } else { - throwUnknownField(currentFieldName, parser); - } - } else { - throwUnknownToken(token, parser); + public Iterator toXContentChunked(ToXContent.Params params) { + return Iterators.concat(Iterators.single((builder, p) -> { + builder.startObject(); + builder.field(ERRORS, hasFailures()); + builder.field(TOOK, tookInMillis); + if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { + builder.field(INGEST_TOOK, ingestTookInMillis); } - } - return new BulkResponse(items.toArray(new BulkItemResponse[items.size()]), took, ingestTook); + return builder.startArray(ITEMS); + }), Iterators.forArray(responses), Iterators.single((builder, p) -> builder.endArray().endObject())); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java index ab1fa9f44c293..3eeb96546c9b0 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java @@ -34,10 +34,6 @@ public BulkShardResponse(ShardId shardId, BulkItemResponse[] responses) { this.responses = responses; } - public ShardId getShardId() { - return shardId; - } - public BulkItemResponse[] getResponses() { return responses; } @@ -60,6 +56,6 @@ public void setForcedRefresh(boolean forcedRefresh) { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); shardId.writeTo(out); - out.writeArray((o, item) -> item.writeThin(out), responses); + out.writeArray((o, item) -> item.writeThin(o), responses); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkAction.java 
b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkAction.java index a799c60fe7b38..089869395c997 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkAction.java @@ -16,6 +16,6 @@ public class SimulateBulkAction extends ActionType { public static final String NAME = "indices:data/write/simulate/bulk"; private SimulateBulkAction() { - super(NAME, BulkResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index ea0399d0b87fe..de11a57a237df 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -53,6 +53,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; @@ -60,7 +61,6 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.SystemIndices; @@ -73,7 +73,6 @@ import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -81,10 +80,10 @@ import java.util.Set; import java.util.SortedMap; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicIntegerArray; 
import java.util.function.LongSupplier; import java.util.stream.Collectors; +import java.util.stream.IntStream; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; @@ -106,7 +105,7 @@ public class TransportBulkAction extends HandledTransportAction indices = bulkRequest.requests.stream() + final Map indices = bulkRequest.requests.stream() // delete requests should not attempt to create the index (if the index does not // exist), unless an external versioning is used .filter( @@ -354,20 +353,23 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec || request.versionType() == VersionType.EXTERNAL || request.versionType() == VersionType.EXTERNAL_GTE ) - .collect(Collectors.toMap(DocWriteRequest::index, DocWriteRequest::isRequireAlias, (v1, v2) -> v1 || v2)); + .collect( + Collectors.toMap( + DocWriteRequest::index, + request -> new ReducedRequestInfo(request.isRequireAlias(), request.isRequireDataStream()), + ReducedRequestInfo::merge + ) + ); // Step 2: filter the list of indices to find those that don't currently exist. 
final Map indicesThatCannotBeCreated = new HashMap<>(); - Set autoCreateIndices = new HashSet<>(); - ClusterState state = clusterService.state(); - for (Map.Entry indexAndFlag : indices.entrySet()) { - final String index = indexAndFlag.getKey(); - boolean shouldAutoCreate = indexNameExpressionResolver.hasIndexAbstraction(index, state) == false; + final ClusterState state = clusterService.state(); + Map indicesToAutoCreate = indices.entrySet() + .stream() + .filter(entry -> indexNameExpressionResolver.hasIndexAbstraction(entry.getKey(), state) == false) // We should only auto create if we are not requiring it to be an alias - if (shouldAutoCreate && (indexAndFlag.getValue() == false)) { - autoCreateIndices.add(index); - } - } + .filter(entry -> entry.getValue().isRequireAlias == false) + .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().isRequireDataStream)); // Step 3: Collect all the data streams that need to be rolled over before writing Set dataStreamsToBeRolledOver = indices.keySet().stream().filter(target -> { @@ -381,7 +383,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec bulkRequest, executorName, listener, - autoCreateIndices, + indicesToAutoCreate, dataStreamsToBeRolledOver, indicesThatCannotBeCreated, startTime @@ -397,14 +399,14 @@ protected void createMissingIndicesAndIndexData( BulkRequest bulkRequest, String executorName, ActionListener listener, - Set autoCreateIndices, + Map indicesToAutoCreate, Set dataStreamsToBeRolledOver, Map indicesThatCannotBeCreated, long startTime ) { final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); // Optimizing when there are no prerequisite actions - if (autoCreateIndices.isEmpty() && dataStreamsToBeRolledOver.isEmpty()) { + if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty()) { executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); return; } @@ -415,8 +417,9 @@ 
protected void doRun() { } }); try (RefCountingRunnable refs = new RefCountingRunnable(executeBulkRunnable)) { - for (String index : autoCreateIndices) { - createIndex(index, bulkRequest.timeout(), ActionListener.releaseAfter(new ActionListener<>() { + for (Map.Entry indexEntry : indicesToAutoCreate.entrySet()) { + final String index = indexEntry.getKey(); + createIndex(index, indexEntry.getValue(), bulkRequest.timeout(), ActionListener.releaseAfter(new ActionListener<>() { @Override public void onResponse(CreateIndexResponse createIndexResponse) {} @@ -564,9 +567,10 @@ private static boolean isSystemIndex(SortedMap indices } } - void createIndex(String index, TimeValue timeout, ActionListener listener) { + void createIndex(String index, boolean requireDataStream, TimeValue timeout, ActionListener listener) { CreateIndexRequest createIndexRequest = new CreateIndexRequest(); createIndexRequest.index(index); + createIndexRequest.requireDataStream(requireDataStream); createIndexRequest.cause("auto(bulk api)"); createIndexRequest.masterNodeTimeout(timeout); client.execute(AutoCreateAction.INSTANCE, createIndexRequest, listener); @@ -597,6 +601,15 @@ protected long buildTookInMillis(long startTimeNanos) { return TimeUnit.NANOSECONDS.toMillis(relativeTime() - startTimeNanos); } + private record ReducedRequestInfo(boolean isRequireAlias, boolean isRequireDataStream) { + private ReducedRequestInfo merge(ReducedRequestInfo other) { + return new ReducedRequestInfo( + this.isRequireAlias || other.isRequireAlias, + this.isRequireDataStream || other.isRequireDataStream + ); + } + } + /** * retries on retryable cluster blocks, resolves item requests, * constructs shard bulk requests and delegates execution to shard bulk action @@ -638,6 +651,11 @@ protected void doRun() { if (handleBlockExceptions(clusterState)) { return; } + Map> requestsByShard = groupRequestsByShards(clusterState); + executeBulkRequestsByShard(requestsByShard, clusterState); + } + + private Map> 
groupRequestsByShards(ClusterState clusterState) { final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver); Metadata metadata = clusterState.metadata(); // Group the requests by ShardId -> Operations mapping @@ -655,8 +673,11 @@ protected void doRun() { if (addFailureIfIndexCannotBeCreated(docWriteRequest, i)) { continue; } + if (addFailureIfRequiresDataStreamAndNoParentDataStream(docWriteRequest, i, metadata)) { + continue; + } IndexAbstraction ia = null; - boolean includeDataStreams = docWriteRequest.opType() == DocWriteRequest.OpType.CREATE; + boolean includeDataStreams = docWriteRequest.opType() == OpType.CREATE; try { ia = concreteIndices.resolveIfAbsent(docWriteRequest); if (ia.isDataStreamRelated() && includeDataStreams == false) { @@ -668,8 +689,7 @@ protected void doRun() { if (ia.getParentDataStream() != null && // avoid valid cases when directly indexing into a backing index // (for example when directly indexing into .ds-logs-foobar-000001) - ia.getName().equals(docWriteRequest.index()) == false - && docWriteRequest.opType() != DocWriteRequest.OpType.CREATE) { + ia.getName().equals(docWriteRequest.index()) == false && docWriteRequest.opType() != OpType.CREATE) { throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); } @@ -698,7 +718,10 @@ protected void doRun() { bulkRequest.requests.set(i, null); } } + return requestsByShard; + } + private void executeBulkRequestsByShard(Map> requestsByShard, ClusterState clusterState) { if (requestsByShard.isEmpty()) { listener.onResponse( new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) @@ -706,60 +729,62 @@ protected void doRun() { return; } - final AtomicInteger counter = new AtomicInteger(requestsByShard.size()); String nodeId = clusterService.localNode().getId(); - for (Map.Entry> entry : requestsByShard.entrySet()) { - final ShardId shardId = 
entry.getKey(); - final List requests = entry.getValue(); - BulkShardRequest bulkShardRequest = new BulkShardRequest( - shardId, - bulkRequest.getRefreshPolicy(), - requests.toArray(new BulkItemRequest[0]) + Runnable onBulkItemsComplete = () -> { + listener.onResponse( + new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) ); - bulkShardRequest.waitForActiveShards(bulkRequest.waitForActiveShards()); - bulkShardRequest.timeout(bulkRequest.timeout()); - bulkShardRequest.routedBasedOnClusterVersion(clusterState.version()); - if (task != null) { - bulkShardRequest.setParentTask(nodeId, task.getId()); - } - client.executeLocally(TransportShardBulkAction.TYPE, bulkShardRequest, new ActionListener<>() { - @Override - public void onResponse(BulkShardResponse bulkShardResponse) { - for (BulkItemResponse bulkItemResponse : bulkShardResponse.getResponses()) { - // we may have no response if item failed - if (bulkItemResponse.getResponse() != null) { - bulkItemResponse.getResponse().setShardInfo(bulkShardResponse.getShardInfo()); - } - responses.set(bulkItemResponse.getItemId(), bulkItemResponse); - } - maybeFinishHim(); + // Allow memory for bulk shard request items to be reclaimed before all items have been completed + bulkRequest = null; + }; + + try (RefCountingRunnable bulkItemRequestCompleteRefCount = new RefCountingRunnable(onBulkItemsComplete)) { + for (Map.Entry> entry : requestsByShard.entrySet()) { + final ShardId shardId = entry.getKey(); + final List requests = entry.getValue(); + + BulkShardRequest bulkShardRequest = new BulkShardRequest( + shardId, + bulkRequest.getRefreshPolicy(), + requests.toArray(new BulkItemRequest[0]) + ); + bulkShardRequest.waitForActiveShards(bulkRequest.waitForActiveShards()); + bulkShardRequest.timeout(bulkRequest.timeout()); + bulkShardRequest.routedBasedOnClusterVersion(clusterState.version()); + if (task != null) { + bulkShardRequest.setParentTask(nodeId, task.getId()); } + 
executeBulkShardRequest(bulkShardRequest, bulkItemRequestCompleteRefCount.acquire()); + } + } + } - @Override - public void onFailure(Exception e) { - // create failures for all relevant requests - for (BulkItemRequest request : requests) { - final String indexName = request.index(); - DocWriteRequest docWriteRequest = request.request(); - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexName, docWriteRequest.id(), e); - responses.set(request.id(), BulkItemResponse.failure(request.id(), docWriteRequest.opType(), failure)); + private void executeBulkShardRequest(BulkShardRequest bulkShardRequest, Releasable releaseOnFinish) { + client.executeLocally(TransportShardBulkAction.TYPE, bulkShardRequest, new ActionListener<>() { + @Override + public void onResponse(BulkShardResponse bulkShardResponse) { + for (BulkItemResponse bulkItemResponse : bulkShardResponse.getResponses()) { + // we may have no response if item failed + if (bulkItemResponse.getResponse() != null) { + bulkItemResponse.getResponse().setShardInfo(bulkShardResponse.getShardInfo()); } - maybeFinishHim(); + responses.set(bulkItemResponse.getItemId(), bulkItemResponse); } + releaseOnFinish.close(); + } - private void maybeFinishHim() { - if (counter.decrementAndGet() == 0) { - listener.onResponse( - new BulkResponse( - responses.toArray(new BulkItemResponse[responses.length()]), - buildTookInMillis(startTimeNanos) - ) - ); - } + @Override + public void onFailure(Exception e) { + // create failures for all relevant requests + for (BulkItemRequest request : bulkShardRequest.items()) { + final String indexName = request.index(); + DocWriteRequest docWriteRequest = request.request(); + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexName, docWriteRequest.id(), e); + responses.set(request.id(), BulkItemResponse.failure(request.id(), docWriteRequest.opType(), failure)); } - }); - } - bulkRequest = null; // allow memory for bulk request items to be reclaimed before all 
items have been completed + releaseOnFinish.close(); + } + }); } private boolean handleBlockExceptions(ClusterState state) { @@ -829,6 +854,22 @@ private boolean addFailureIfRequiresAliasAndAliasIsMissing(DocWriteRequest re return false; } + private boolean addFailureIfRequiresDataStreamAndNoParentDataStream(DocWriteRequest request, int idx, final Metadata metadata) { + if (request.isRequireDataStream() && (metadata.indexIsADataStream(request.index()) == false)) { + Exception exception = new ResourceNotFoundException( + "[" + + DocWriteRequest.REQUIRE_DATA_STREAM + + "] request flag is [true] and [" + + request.index() + + "] is not a data stream", + request.index() + ); + addFailure(request, idx, exception); + return true; + } + return false; + } + private boolean addFailureIfIndexIsClosed(DocWriteRequest request, Index concreteIndex, int idx, final Metadata metadata) { IndexMetadata indexMetadata = metadata.getIndexSafe(concreteIndex); if (indexMetadata.getState() == IndexMetadata.State.CLOSE) { @@ -881,10 +922,12 @@ private static class ConcreteIndices { IndexAbstraction resolveIfAbsent(DocWriteRequest request) { try { - return indexAbstractions.computeIfAbsent( - request.index(), - key -> indexNameExpressionResolver.resolveWriteIndexAbstraction(state, request) - ); + IndexAbstraction indexAbstraction = indexAbstractions.get(request.index()); + if (indexAbstraction == null) { + indexAbstraction = indexNameExpressionResolver.resolveWriteIndexAbstraction(state, request); + indexAbstractions.put(request.index(), indexAbstraction); + } + return indexAbstraction; } catch (IndexNotFoundException e) { if (e.getMetadataKeys().contains(EXCLUDED_DATA_STREAMS_KEY)) { throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams", e); @@ -895,7 +938,12 @@ IndexAbstraction resolveIfAbsent(DocWriteRequest request) { } IndexRouting routing(Index index) { - return routings.computeIfAbsent(index, idx -> 
IndexRouting.fromIndexMetadata(state.metadata().getIndexSafe(idx))); + IndexRouting routing = routings.get(index); + if (routing == null) { + routing = IndexRouting.fromIndexMetadata(state.metadata().getIndexSafe(index)); + routings.put(index, routing); + } + return routing; } } @@ -1017,48 +1065,82 @@ ActionListener wrapActionListenerIfNeeded(long ingestTookInMillis, ); } else { return actionListener.map(response -> { - BulkItemResponse[] items = response.getItems(); - for (int i = 0; i < items.length; i++) { - itemResponses.add(originalSlots.get(i), response.getItems()[i]); + // these items are the responses from the subsequent bulk request, their 'slots' + // are not correct for this response we're building + final BulkItemResponse[] bulkResponses = response.getItems(); + + final BulkItemResponse[] allResponses = new BulkItemResponse[bulkResponses.length + itemResponses.size()]; + + // the item responses are from the original request, so their slots are correct. + // these are the responses for requests that failed early and were not passed on to the subsequent bulk. + for (BulkItemResponse item : itemResponses) { + allResponses[item.getItemId()] = item; } - return new BulkResponse( - itemResponses.toArray(new BulkItemResponse[0]), - response.getTook().getMillis(), - ingestTookInMillis - ); + + // use the original slots for the responses from the bulk + for (int i = 0; i < bulkResponses.length; i++) { + allResponses[originalSlots.get(i)] = bulkResponses[i]; + } + + if (Assertions.ENABLED) { + assertResponsesAreCorrect(bulkResponses, allResponses); + } + + return new BulkResponse(allResponses, response.getTook().getMillis(), ingestTookInMillis); }); } } - synchronized void markItemAsDropped(int slot) { - IndexRequest indexRequest = getIndexWriteRequest(bulkRequest.requests().get(slot)); - failedSlots.set(slot); - final String id = indexRequest.id() == null ? 
DROPPED_ITEM_WITH_AUTO_GENERATED_ID : indexRequest.id(); - itemResponses.add( - BulkItemResponse.success( - slot, - indexRequest.opType(), - new UpdateResponse( - new ShardId(indexRequest.index(), IndexMetadata.INDEX_UUID_NA_VALUE, 0), - id, - SequenceNumbers.UNASSIGNED_SEQ_NO, - SequenceNumbers.UNASSIGNED_PRIMARY_TERM, - indexRequest.version(), - DocWriteResponse.Result.NOOP - ) - ) - ); + private void assertResponsesAreCorrect(BulkItemResponse[] bulkResponses, BulkItemResponse[] allResponses) { + // check for an empty intersection between the ids + final Set failedIds = itemResponses.stream().map(BulkItemResponse::getItemId).collect(Collectors.toSet()); + final Set responseIds = IntStream.range(0, bulkResponses.length) + .map(originalSlots::get) // resolve subsequent bulk ids back to the original slots + .boxed() + .collect(Collectors.toSet()); + assert Sets.haveEmptyIntersection(failedIds, responseIds) + : "bulk item response slots cannot have failed and been processed in the subsequent bulk request, failed ids: " + + failedIds + + ", response ids: " + + responseIds; + + // check for the correct number of responses + final int expectedResponseCount = bulkRequest.requests.size(); + final int actualResponseCount = failedIds.size() + responseIds.size(); + assert expectedResponseCount == actualResponseCount + : "Expected [" + expectedResponseCount + "] responses, but found [" + actualResponseCount + "]"; + + // check that every response is present + for (int i = 0; i < allResponses.length; i++) { + assert allResponses[i] != null : "BulkItemResponse at index [" + i + "] was null"; + } } synchronized void markItemAsFailed(int slot, Exception e) { - IndexRequest indexRequest = getIndexWriteRequest(bulkRequest.requests().get(slot)); + final DocWriteRequest docWriteRequest = bulkRequest.requests().get(slot); + final String id = Objects.requireNonNullElse(docWriteRequest.id(), DROPPED_OR_FAILED_ITEM_WITH_AUTO_GENERATED_ID); // We hit a error during preprocessing a 
request, so we: - // 1) Remember the request item slot from the bulk, so that we're done processing all requests we know what failed + // 1) Remember the request item slot from the bulk, so that when we're done processing all requests we know what failed // 2) Add a bulk item failure for this request // 3) Continue with the next request in the bulk. failedSlots.set(slot); - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexRequest.index(), indexRequest.id(), e); - itemResponses.add(BulkItemResponse.failure(slot, indexRequest.opType(), failure)); + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(docWriteRequest.index(), id, e); + itemResponses.add(BulkItemResponse.failure(slot, docWriteRequest.opType(), failure)); + } + + synchronized void markItemAsDropped(int slot) { + final DocWriteRequest docWriteRequest = bulkRequest.requests().get(slot); + final String id = Objects.requireNonNullElse(docWriteRequest.id(), DROPPED_OR_FAILED_ITEM_WITH_AUTO_GENERATED_ID); + failedSlots.set(slot); + UpdateResponse dropped = new UpdateResponse( + new ShardId(docWriteRequest.index(), IndexMetadata.INDEX_UUID_NA_VALUE, 0), + id, + UNASSIGNED_SEQ_NO, + UNASSIGNED_PRIMARY_TERM, + docWriteRequest.version(), + DocWriteResponse.Result.NOOP + ); + itemResponses.add(BulkItemResponse.success(slot, docWriteRequest.opType(), dropped)); } } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index e6d5bdcc46696..2ce276a7c0524 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -76,7 +76,7 @@ public class TransportShardBulkAction extends TransportWriteAction { public static final String ACTION_NAME = BulkAction.NAME + "[s]"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, 
BulkShardResponse::new); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private static final Logger logger = LogManager.getLogger(TransportShardBulkAction.class); @@ -377,7 +377,7 @@ static boolean executeBulkItemRequest( .merge( MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(result.getRequiredMappingUpdate()), - MapperService.MergeReason.MAPPING_UPDATE_PREFLIGHT + MapperService.MergeReason.MAPPING_AUTO_UPDATE_PREFLIGHT ) ).map(DocumentMapper::mappingSource); Optional previousSource = Optional.ofNullable(primary.mapperService().documentMapper()) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index a44c8091aaa2e..e77d4ab9e0b85 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -69,7 +69,7 @@ protected void createMissingIndicesAndIndexData( BulkRequest bulkRequest, String executorName, ActionListener listener, - Set autoCreateIndices, + Map indicesToAutoCreate, Set dataStreamsToRollover, Map indicesThatCannotBeCreated, long startTime diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java index 4ecb092f34d4b..f9e559fa16ec7 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java @@ -27,7 +27,7 @@ public class CreateDataStreamAction extends ActionType { public static final String NAME = "indices:admin/data_stream/create"; private CreateDataStreamAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements IndicesRequest { 
diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java index 4457fc9e9a0f4..9a4eaf9a78e9b 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java @@ -32,7 +32,7 @@ public class DataStreamsStatsAction extends ActionType { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java index e23accd351a26..b68a7d3fcd159 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java @@ -30,7 +30,7 @@ public class DeleteDataStreamAction extends ActionType { public static final String NAME = "indices:admin/data_stream/delete"; private DeleteDataStreamAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends MasterNodeRequest implements IndicesRequest.Replaceable { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 073ac021f787a..079c8f8b01ceb 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -36,7 +36,7 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.DATA_STREAM_RESPONSE_INDEX_PROPERTIES; +import static org.elasticsearch.TransportVersions.V_8_11_X; public class GetDataStreamAction extends ActionType { @@ -44,7 +44,7 @@ public class GetDataStreamAction extends ActionType implements 
IndicesRequest.Replaceable { @@ -75,7 +75,7 @@ public Request(StreamInput in) throws IOException { super(in); this.names = in.readOptionalStringArray(); this.indicesOptions = IndicesOptions.readIndicesOptions(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.includeDefaults = in.readBoolean(); } else { this.includeDefaults = false; @@ -87,7 +87,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalStringArray(names); indicesOptions.writeIndicesOptions(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); } } @@ -217,10 +217,8 @@ public DataStreamInfo( in.readOptionalString(), in.readOptionalString(), in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0) ? in.readOptionalWriteable(TimeSeries::new) : null, - in.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES) - ? in.readMap(Index::new, IndexProperties::new) - : Map.of(), - in.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES) ? in.readBoolean() : true + in.getTransportVersion().onOrAfter(V_8_11_X) ? in.readMap(Index::new, IndexProperties::new) : Map.of(), + in.getTransportVersion().onOrAfter(V_8_11_X) ? 
in.readBoolean() : true ); } @@ -264,7 +262,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0)) { out.writeOptionalWriteable(timeSeries); } - if (out.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES)) { + if (out.getTransportVersion().onOrAfter(V_8_11_X)) { out.writeMap(indexSettingsValues); out.writeBoolean(templatePreferIlmValue); } @@ -481,9 +479,7 @@ public Response(List dataStreams, @Nullable RolloverConfiguratio public Response(StreamInput in) throws IOException { this( in.readCollectionAsList(DataStreamInfo::new), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) - ? in.readOptionalWriteable(RolloverConfiguration::new) - : null + in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(RolloverConfiguration::new) : null ); } @@ -499,7 +495,7 @@ public RolloverConfiguration getRolloverConfiguration() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(dataStreams); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java index 0f69a68bbfbe8..3a834273e84cf 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java @@ -27,7 +27,7 @@ public class MigrateToDataStreamAction extends ActionType public static final String NAME = "indices:admin/data_stream/migrate"; private MigrateToDataStreamAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends 
AcknowledgedRequest implements IndicesRequest { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/ModifyDataStreamsAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/ModifyDataStreamsAction.java index d2a04305cb2ba..308b8ccfd5064 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/ModifyDataStreamsAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/ModifyDataStreamsAction.java @@ -36,7 +36,7 @@ public class ModifyDataStreamsAction extends ActionType { public static final String NAME = "indices:admin/data_stream/modify"; private ModifyDataStreamsAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static final class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java index f58e9497949bf..3b3e644272cbc 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java @@ -27,7 +27,7 @@ public class PromoteDataStreamAction extends ActionType { public static final String NAME = "indices:admin/data_stream/promote"; private PromoteDataStreamAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends MasterNodeRequest implements IndicesRequest { diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java index 70f0a9a12e02e..e4b8edea58114 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java @@ -231,6 +231,11 @@ public boolean isRequireAlias() { return false; } + 
@Override + public boolean isRequireDataStream() { + return false; + } + @Override public void process(IndexRouting indexRouting) { // Nothing to do diff --git a/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java index 6c275d994a4ed..6abe033637c56 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java +++ b/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java @@ -24,7 +24,7 @@ public class TransportDeleteAction extends TransportSingleItemBulkWriteAction { public static final String NAME = "indices:data/write/delete"; - public static final ActionType TYPE = new ActionType<>(NAME, DeleteResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); @Inject public TransportDeleteAction(TransportService transportService, ActionFilters actionFilters, TransportBulkAction bulkAction) { diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index 61c979f9494b5..08d315fe39ce5 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -35,7 +35,7 @@ public class DownsampleAction extends ActionType { public static final TimeValue DEFAULT_WAIT_TIMEOUT = new TimeValue(1, TimeUnit.DAYS); private DownsampleAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends MasterNodeRequest implements IndicesRequest, ToXContentObject { @@ -62,7 +62,7 @@ public Request(StreamInput in) throws IOException { super(in); sourceIndex = in.readString(); targetIndex = in.readString(); - waitTimeout = in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) + waitTimeout = 
in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) ? TimeValue.parseTimeValue(in.readString(), "timeout") : DEFAULT_WAIT_TIMEOUT; downsampleConfig = new DownsampleConfig(in); @@ -89,7 +89,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(sourceIndex); out.writeString(targetIndex); out.writeString( - out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) + out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) ? waitTimeout.getStringRep() : DEFAULT_WAIT_TIMEOUT.getStringRep() ); diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index d889f8fac8113..d2d7a945520c1 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -49,7 +49,7 @@ // TODO: AggregatedDfs. Currently the idf can be different then when executing a normal search with explain. 
public class TransportExplainAction extends TransportSingleShardAction { - public static final ActionType TYPE = new ActionType<>("indices:data/read/explain", ExplainResponse::new); + public static final ActionType TYPE = new ActionType<>("indices:data/read/explain"); private final SearchService searchService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java index 06ea2dee17481..722808af879d6 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java @@ -81,7 +81,7 @@ static List readList(StreamInput input) throws I responses.add(new FieldCapabilitiesIndexResponse(input)); } final int groups = input.readVInt(); - if (input.getTransportVersion().onOrAfter(TransportVersions.COMPACT_FIELD_CAPS_ADDED)) { + if (input.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { collectCompressedResponses(input, groups, responses); } else { collectResponsesLegacyFormat(input, groups, responses); @@ -139,7 +139,7 @@ static void writeList(StreamOutput output, List } output.writeCollection(ungroupedResponses); - if (output.getTransportVersion().onOrAfter(TransportVersions.COMPACT_FIELD_CAPS_ADDED)) { + if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { writeCompressedResponses(output, groupedResponsesMap); } else { writeResponsesLegacyFormat(output, groupedResponsesMap); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 2e3dd1ab443f9..fa396b1ad18bf 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -14,11 +14,11 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.RefCountingRunnable; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -65,7 +65,11 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction { public static final String NAME = "indices:data/read/field_caps"; - public static final ActionType TYPE = new ActionType<>(NAME, FieldCapabilitiesResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + FieldCapabilitiesResponse::new + ); public static final String ACTION_NODE_NAME = NAME + "[n]"; public static final Logger LOGGER = LogManager.getLogger(TransportFieldCapabilitiesAction.class); @@ -213,8 +217,8 @@ private void doExecuteForked(Task task, FieldCapabilitiesRequest request, final for (Map.Entry remoteIndices : remoteClusterIndices.entrySet()) { String clusterAlias = remoteIndices.getKey(); OriginalIndices originalIndices = remoteIndices.getValue(); - Client remoteClusterClient = transportService.getRemoteClusterService() - .getRemoteClusterClient(threadPool, clusterAlias, searchCoordinationExecutor); + var remoteClusterClient = transportService.getRemoteClusterService() + .getRemoteClusterClient(clusterAlias, searchCoordinationExecutor); FieldCapabilitiesRequest 
remoteRequest = prepareRemoteRequest(request, originalIndices, nowInMillis); ActionListener remoteListener = ActionListener.wrap(response -> { for (FieldCapabilitiesIndexResponse resp : response.getIndexResponses()) { @@ -234,7 +238,11 @@ private void doExecuteForked(Task task, FieldCapabilitiesRequest request, final handleIndexFailure.accept(RemoteClusterAware.buildRemoteIndexName(clusterAlias, index), ex); } }); - remoteClusterClient.fieldCaps(remoteRequest, ActionListener.releaseAfter(remoteListener, refs.acquire())); + remoteClusterClient.execute( + TransportFieldCapabilitiesAction.REMOTE_TYPE, + remoteRequest, + ActionListener.releaseAfter(remoteListener, refs.acquire()) + ); } } } diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java index 6871c60f11a15..5b407d0ebceb0 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; @@ -19,11 +18,9 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Iterator; -import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -149,37 +146,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return getResult.toXContent(builder, params); } - /** - * This method can be used to parse a {@link GetResponse} object when it has been printed out - * as a xcontent using the {@link 
#toXContent(XContentBuilder, Params)} method. - *

    - * For forward compatibility reason this method might not fail if it tries to parse a field it - * doesn't know. But before returning the result it will check that enough information were - * parsed to return a valid {@link GetResponse} instance and throws a {@link ParsingException} - * otherwise. This is the case when we get a 404 back, which can be parsed as a normal - * {@link GetResponse} with found set to false, or as an elasticsearch exception. The caller - * of this method needs a way to figure out whether we got back a valid get response, which - * can be done by catching ParsingException. - * - * @param parser {@link XContentParser} to parse the response from - * @return a {@link GetResponse} - * @throws IOException is an I/O exception occurs during the parsing - */ - public static GetResponse fromXContent(XContentParser parser) throws IOException { - GetResult getResult = GetResult.fromXContent(parser); - - // At this stage we ensure that we parsed enough information to return - // a valid GetResponse instance. If it's not the case, we throw an - // exception so that callers know it and can handle it correctly. 
- if (getResult.getIndex() == null && getResult.getId() == null) { - throw new ParsingException( - parser.getTokenLocation(), - String.format(Locale.ROOT, "Missing required fields [%s,%s]", GetResult._INDEX, GetResult._ID) - ); - } - return new GetResponse(getResult); - } - @Override public void writeTo(StreamOutput out) throws IOException { getResult.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java index e91329e810397..4f548e227dcfb 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java @@ -15,30 +15,20 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.rest.action.document.RestMultiGetAction; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import java.io.IOException; -import java.util.ArrayList; import java.util.Iterator; -import java.util.List; public class MultiGetResponse extends ActionResponse implements Iterable, ToXContentObject { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MultiGetResponse.class); - private static final ParseField INDEX = new ParseField("_index"); - private static final ParseField TYPE = new ParseField("_type"); - private static final ParseField ID = new ParseField("_id"); - private static final ParseField ERROR = new 
ParseField("error"); - private static final ParseField DOCS = new ParseField("docs"); + static final ParseField INDEX = new ParseField("_index"); + static final ParseField ID = new ParseField("_id"); + static final ParseField DOCS = new ParseField("docs"); /** * Represents a failure. @@ -151,80 +141,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static MultiGetResponse fromXContent(XContentParser parser) throws IOException { - String currentFieldName = null; - List items = new ArrayList<>(); - for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) { - switch (token) { - case FIELD_NAME: - currentFieldName = parser.currentName(); - break; - case START_ARRAY: - if (DOCS.getPreferredName().equals(currentFieldName)) { - for (token = parser.nextToken(); token != Token.END_ARRAY; token = parser.nextToken()) { - if (token == Token.START_OBJECT) { - items.add(parseItem(parser)); - } - } - } - break; - default: - // If unknown tokens are encounter then these should be ignored, because - // this is parsing logic on the client side. 
- break; - } - } - return new MultiGetResponse(items.toArray(new MultiGetItemResponse[0])); - } - - private static MultiGetItemResponse parseItem(XContentParser parser) throws IOException { - String currentFieldName = null; - String index = null; - String id = null; - ElasticsearchException exception = null; - GetResult getResult = null; - for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) { - switch (token) { - case FIELD_NAME: - currentFieldName = parser.currentName(); - if (INDEX.match(currentFieldName, parser.getDeprecationHandler()) == false - && ID.match(currentFieldName, parser.getDeprecationHandler()) == false - && ERROR.match(currentFieldName, parser.getDeprecationHandler()) == false) { - getResult = GetResult.fromXContentEmbedded(parser, index, id); - } - break; - case VALUE_STRING: - if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) { - index = parser.text(); - } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical("mget_with_types", RestMultiGetAction.TYPES_DEPRECATION_MESSAGE); - } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { - id = parser.text(); - } - break; - case START_OBJECT: - if (ERROR.match(currentFieldName, parser.getDeprecationHandler())) { - exception = ElasticsearchException.fromXContent(parser); - } - break; - default: - // If unknown tokens are encounter then these should be ignored, because - // this is parsing logic on the client side. 
- break; - } - if (getResult != null) { - break; - } - } - - if (exception != null) { - return new MultiGetItemResponse(null, new Failure(index, id, exception)); - } else { - GetResponse getResponse = new GetResponse(getResult); - return new MultiGetItemResponse(getResponse, null); - } - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeArray(responses); diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 6440304360bf3..5eab04663e959 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -8,17 +8,23 @@ package org.elasticsearch.action.get; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.OperationRouting; @@ -27,15 +33,17 @@ import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -47,7 +55,7 @@ */ public class TransportGetAction extends TransportSingleShardAction { - public static final ActionType TYPE = new ActionType<>("indices:data/read/get", GetResponse::new); + public static final ActionType TYPE = new ActionType<>("indices:data/read/get"); private static final Logger logger = LogManager.getLogger(TransportGetAction.class); private final IndicesService indicesService; @@ -184,8 +192,8 @@ private void asyncGet(GetRequest request, ShardId shardId, ActionListener listener) throws IOException { ShardId shardId = indexShard.shardId(); - var node = getCurrentNodeOfPrimary(clusterService.state(), shardId); if (request.refresh()) { + var node = getCurrentNodeOfPrimary(clusterService.state(), shardId); logger.trace("send refresh action for shard {} to node {}", shardId, node.getId()); var refreshRequest = new BasicReplicationRequest(shardId); refreshRequest.setParentTask(request.getParentTask()); @@ -194,44 +202,97 @@ private void handleGetOnUnpromotableShard(GetRequest request, IndexShard indexSh refreshRequest, listener.delegateFailureAndWrap((l, replicationResponse) -> 
super.asyncShardOperation(request, shardId, l)) ); - } else if (request.realtime()) { - TransportGetFromTranslogAction.Request getFromTranslogRequest = new TransportGetFromTranslogAction.Request(request, shardId); - getFromTranslogRequest.setParentTask(request.getParentTask()); - transportService.sendRequest( - node, - TransportGetFromTranslogAction.NAME, - getFromTranslogRequest, - new ActionListenerResponseHandler<>(listener.delegateFailure((l, r) -> { - if (r.getResult() != null) { - logger.debug("received result for real-time get for id '{}' from promotable shard", request.id()); - l.onResponse(new GetResponse(r.getResult())); - } else { - logger.debug( - "no result for real-time get for id '{}' from promotable shard (segment generation to wait for: {})", - request.id(), - r.segmentGeneration() - ); - if (r.segmentGeneration() == -1) { - // Nothing to wait for (no previous unsafe generation), just handle the Get locally. - ActionRunnable.supply(l, () -> shardOperation(request, shardId)).run(); - } else { - assert r.segmentGeneration() > -1L; - assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; - indexShard.waitForPrimaryTermAndGeneration( - r.primaryTerm(), - r.segmentGeneration(), - listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)) - ); - } - } - }), TransportGetFromTranslogAction.Response::new, getExecutor(request, shardId)) + return; + } + if (request.realtime()) { + final var state = clusterService.state(); + final var observer = new ClusterStateObserver( + state, + clusterService, + TimeValue.timeValueSeconds(60), + logger, + threadPool.getThreadContext() ); + getFromTranslog(request, indexShard, state, observer, listener); } else { // A non-real-time get with no explicit refresh requested. 
super.asyncShardOperation(request, shardId, listener); } } + private void getFromTranslog( + GetRequest request, + IndexShard indexShard, + ClusterState state, + ClusterStateObserver observer, + ActionListener listener + ) { + tryGetFromTranslog(request, indexShard, state, listener.delegateResponse((l, e) -> { + final var cause = ExceptionsHelper.unwrapCause(e); + logger.debug("get_from_translog failed", cause); + if (cause instanceof ShardNotFoundException + || cause instanceof IndexNotFoundException + || cause instanceof NoShardAvailableActionException + || cause instanceof UnavailableShardsException) { + logger.debug("retrying get_from_translog"); + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + getFromTranslog(request, indexShard, state, observer, l); + } + + @Override + public void onClusterServiceClose() { + l.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + l.onFailure(new ElasticsearchException("Timed out retrying get_from_translog", cause)); + } + }); + } else { + l.onFailure(e); + } + })); + } + + private void tryGetFromTranslog(GetRequest request, IndexShard indexShard, ClusterState state, ActionListener listener) { + ShardId shardId = indexShard.shardId(); + var node = getCurrentNodeOfPrimary(state, shardId); + TransportGetFromTranslogAction.Request getFromTranslogRequest = new TransportGetFromTranslogAction.Request(request, shardId); + getFromTranslogRequest.setParentTask(request.getParentTask()); + transportService.sendRequest( + node, + TransportGetFromTranslogAction.NAME, + getFromTranslogRequest, + new ActionListenerResponseHandler<>(listener.delegateFailure((l, r) -> { + if (r.getResult() != null) { + logger.debug("received result for real-time get for id '{}' from promotable shard", request.id()); + l.onResponse(new GetResponse(r.getResult())); + } else { + logger.debug( + "no result 
for real-time get for id '{}' from promotable shard (segment generation to wait for: {})", + request.id(), + r.segmentGeneration() + ); + if (r.segmentGeneration() == -1) { + // Nothing to wait for (no previous unsafe generation), just handle the Get locally. + ActionRunnable.supply(l, () -> shardOperation(request, shardId)).run(); + } else { + assert r.segmentGeneration() > -1L; + assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; + indexShard.waitForPrimaryTermAndGeneration( + r.primaryTerm(), + r.segmentGeneration(), + listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)) + ); + } + } + }), TransportGetFromTranslogAction.Response::new, getExecutor(request, shardId)) + ); + } + static DiscoveryNode getCurrentNodeOfPrimary(ClusterState clusterState, ShardId shardId) { var shardRoutingTable = clusterState.routingTable().shardRoutingTable(shardId); if (shardRoutingTable.primaryShard() == null || shardRoutingTable.primaryShard().active() == false) { diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java index 1b180874b433d..cd47531f81599 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java @@ -40,7 +40,6 @@ import java.io.IOException; import java.util.Objects; -// TODO(ES-5727): add a retry mechanism to TransportGetFromTranslogAction public class TransportGetFromTranslogAction extends HandledTransportAction< TransportGetFromTranslogAction.Request, TransportGetFromTranslogAction.Response> { diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index 7db644415dbc2..fcb10f3deef60 100644 --- 
a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -35,7 +35,7 @@ public class TransportMultiGetAction extends HandledTransportAction { public static final String NAME = "indices:data/read/mget"; - public static final ActionType TYPE = new ActionType<>(NAME, MultiGetResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); private final ClusterService clusterService; private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index a05bbf1bfd9d3..6dfd706b3268f 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -48,7 +48,7 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction { private static final String ACTION_NAME = TransportMultiGetAction.NAME + "[shard]"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, MultiGetShardResponse::new); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private static final Logger logger = LogManager.getLogger(TransportShardMultiGetAction.class); private final IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 12f7c21cba8e1..eda28eb4e139e 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -43,7 +43,6 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.nio.charset.StandardCharsets; 
import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -73,7 +72,7 @@ public class IndexRequest extends ReplicatedWriteRequest implements DocWriteRequest, CompositeIndicesRequest { private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(IndexRequest.class); - private static final TransportVersion PIPELINES_HAVE_RUN_FIELD_ADDED = TransportVersions.V_8_500_061; + private static final TransportVersion PIPELINES_HAVE_RUN_FIELD_ADDED = TransportVersions.V_8_10_X; /** * Max length of the source document to include into string() @@ -108,6 +107,9 @@ public class IndexRequest extends ReplicatedWriteRequest implement private boolean isPipelineResolved; private boolean requireAlias; + + private boolean requireDataStream; + /** * This indicates whether the response to this request ought to list the ingest pipelines that were executed on the document */ @@ -189,6 +191,11 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio : new ArrayList<>(possiblyImmutableExecutedPipelines); } } + if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + requireDataStream = in.readBoolean(); + } else { + requireDataStream = false; + } } public IndexRequest() { @@ -266,17 +273,7 @@ public ActionRequestValidationException validate() { validationException = DocWriteRequest.validateSeqNoBasedCASParams(this, validationException); - if (id != null && id.getBytes(StandardCharsets.UTF_8).length > MAX_DOCUMENT_ID_LENGTH_IN_BYTES) { - validationException = addValidationError( - "id [" - + id - + "] is too long, must be no longer than " - + MAX_DOCUMENT_ID_LENGTH_IN_BYTES - + " bytes but was: " - + id.getBytes(StandardCharsets.UTF_8).length, - validationException - ); - } + validationException = DocWriteRequest.validateDocIdLength(id, validationException); if (pipeline != null && pipeline.isEmpty()) { validationException = addValidationError("pipeline cannot be an empty string", 
validationException); @@ -750,6 +747,9 @@ private void writeBody(StreamOutput out) throws IOException { out.writeOptionalCollection(executedPipelines, StreamOutput::writeString); } } + if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + out.writeBoolean(requireDataStream); + } } @Override @@ -805,6 +805,19 @@ public boolean isRequireAlias() { return requireAlias; } + @Override + public boolean isRequireDataStream() { + return requireDataStream; + } + + /** + * Set whether this IndexRequest requires a data stream. The data stream may be pre-existing or to-be-created. + */ + public IndexRequest setRequireDataStream(boolean requireDataStream) { + this.requireDataStream = requireDataStream; + return this; + } + @Override public Index getConcreteWriteIndex(IndexAbstraction ia, Metadata metadata) { return ia.getWriteIndex(this, metadata); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 5e156070d0154..b8faf39514cbe 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -221,4 +221,12 @@ public IndexRequestBuilder setRequireAlias(boolean requireAlias) { request.setRequireAlias(requireAlias); return this; } + + /** + * Sets the require_data_stream flag + */ + public IndexRequestBuilder setRequireDataStream(boolean requireDataStream) { + request.setRequireDataStream(requireDataStream); + return this; + } } diff --git a/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index 5e91498244dd0..cc61f40d303d3 100644 --- a/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -32,10 
+32,7 @@ public class TransportIndexAction extends TransportSingleItemBulkWriteAction { public static final String NAME = "indices:data/write/index"; - public static final ActionType TYPE = new ActionType<>(NAME, in -> { - assert false : "Might not be an IndexResponse!"; - return new IndexResponse(in); - }); + public static final ActionType TYPE = new ActionType<>(NAME); @Inject public TransportIndexAction(ActionFilters actionFilters, TransportService transportService, TransportBulkAction bulkAction) { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index 80e465e802781..9d2abe7648390 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -42,11 +41,6 @@ public String getId() { return id; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 6878096e38614..a8eef9f94b884 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -29,7 +29,7 @@ public class DeletePipelineTransportAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType 
TYPE = ActionType.localOnly("cluster:admin/ingest/pipeline/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/ingest/pipeline/delete"); private final IngestService ingestService; @Inject diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java index 39a0417541ef7..48cdf5dbd92e6 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java @@ -16,6 +16,6 @@ public class GetPipelineAction extends ActionType { public static final String NAME = "cluster:admin/ingest/pipeline/get"; public GetPipelineAction() { - super(NAME, GetPipelineResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java index bc9c88a706f30..c685a49cddf2f 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java @@ -10,15 +10,12 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; @@ -27,8 +24,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - 
public class GetPipelineResponse extends ActionResponse implements ToXContentObject { private final List pipelines; @@ -90,32 +85,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * - * @param parser the parser for the XContent that contains the serialized GetPipelineResponse. - * @return an instance of GetPipelineResponse read from the parser - * @throws IOException If the parsing fails - */ - public static GetPipelineResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - List pipelines = new ArrayList<>(); - while (parser.nextToken().equals(Token.FIELD_NAME)) { - String pipelineId = parser.currentName(); - parser.nextToken(); - try (XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent())) { - contentBuilder.generator().copyCurrentStructure(parser); - PipelineConfiguration pipeline = new PipelineConfiguration( - pipelineId, - BytesReference.bytes(contentBuilder), - contentBuilder.contentType() - ); - pipelines.add(pipeline); - } - } - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser); - return new GetPipelineResponse(pipelines); - } - @Override public boolean equals(Object other) { if (other == null) { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java b/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java index 5b5ab44766cce..dcc097ea78389 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.bulk.BulkResponse; import 
org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -38,12 +39,12 @@ public IngestActionForwarder(TransportService transportService) { } @SuppressWarnings({ "rawtypes", "unchecked" }) - public void forwardIngestRequest(ActionType action, ActionRequest request, ActionListener listener) { + public void forwardIngestRequest(ActionType action, ActionRequest request, ActionListener listener) { transportService.sendRequest( randomIngestNode(), action.name(), request, - new ActionListenerResponseHandler(listener, action.getResponseReader(), TransportResponseHandler.TRANSPORT_WORKER) + new ActionListenerResponseHandler(listener, BulkResponse::new, TransportResponseHandler.TRANSPORT_WORKER) ); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java index 592049ddeb200..df891d55adb74 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -54,11 +53,6 @@ public PutPipelineRequest(StreamInput in) throws IOException { this(null, null, null, null); } - @Override - public ActionRequestValidationException validate() { - return null; - } - public String getId() { return id; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 5233961cdda7b..5a97596aa00ff 100644 --- 
a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -34,7 +34,7 @@ import static org.elasticsearch.ingest.IngestService.INGEST_ORIGIN; public class PutPipelineTransportAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/ingest/pipeline/put"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/ingest/pipeline/put"); private final IngestService ingestService; private final OriginSettingClient client; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java index 850ff50cd0187..28aec1ee0ebb8 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java @@ -9,17 +9,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * Holds the result of what a pipeline did to a sample document via the simulate api, but instead of {@link SimulateDocumentBaseResult} * this result class holds the intermediate result each processor did to the sample document. 
@@ -28,16 +23,6 @@ public final class SimulateDocumentVerboseResult implements SimulateDocumentResu public static final String PROCESSOR_RESULT_FIELD = "processor_results"; private final List processorResults; - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "simulate_document_verbose_result", - true, - a -> new SimulateDocumentVerboseResult((List) a[0]) - ); - static { - PARSER.declareObjectArray(constructorArg(), SimulateProcessorResult.PARSER, new ParseField(PROCESSOR_RESULT_FIELD)); - } - public SimulateDocumentVerboseResult(List processorResults) { this.processorResults = processorResults; } @@ -73,8 +58,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } - - public static SimulateDocumentVerboseResult fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java index 3363f3caa164b..8c6d452fb6298 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -36,7 +35,7 @@ public SimulateIndexResponse(StreamInput in) throws IOException { super(in); this.source = in.readBytesReference(); this.sourceXContentType = XContentType.valueOf(in.readString()); - setShardInfo(new ReplicationResponse.ShardInfo(0, 0)); + setShardInfo(ShardInfo.EMPTY); } 
@SuppressWarnings("this-escape") @@ -52,7 +51,7 @@ public SimulateIndexResponse( super(new ShardId(index, "", 0), id == null ? "" : id, 0, 0, version, true, pipelines); this.source = source; this.sourceXContentType = sourceXContentType; - setShardInfo(new ReplicationResponse.ShardInfo(0, 0)); + setShardInfo(ShardInfo.EMPTY); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java index 9d2ee21560907..d62bbbacd13ce 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java @@ -16,6 +16,6 @@ public class SimulatePipelineAction extends ActionType public static final String NAME = "cluster:admin/ingest/pipeline/simulate"; public SimulatePipelineAction() { - super(NAME, SimulatePipelineResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index f5dcb83fa36fc..396a5b63b3cd5 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -8,80 +8,22 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import java.io.IOException; 
import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - public class SimulatePipelineResponse extends ActionResponse implements ToXContentObject { private String pipelineId; private boolean verbose; private List results; - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "simulate_pipeline_response", - true, - a -> { - List results = (List) a[0]; - boolean verbose = false; - if (results.size() > 0) { - if (results.get(0) instanceof SimulateDocumentVerboseResult) { - verbose = true; - } - } - return new SimulatePipelineResponse(null, verbose, results); - } - ); - static { - PARSER.declareObjectArray(constructorArg(), (parser, context) -> { - Token token = parser.currentToken(); - ensureExpectedToken(Token.START_OBJECT, token, parser); - SimulateDocumentResult result = null; - while ((token = parser.nextToken()) != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, token, parser); - String fieldName = parser.currentName(); - token = parser.nextToken(); - if (token == Token.START_ARRAY) { - if (fieldName.equals(SimulateDocumentVerboseResult.PROCESSOR_RESULT_FIELD)) { - List results = new ArrayList<>(); - while ((token = parser.nextToken()) == Token.START_OBJECT) { - results.add(SimulateProcessorResult.fromXContent(parser)); - } - ensureExpectedToken(Token.END_ARRAY, token, parser); - result = new SimulateDocumentVerboseResult(results); - } else { - parser.skipChildren(); - } - } else if (token.equals(Token.START_OBJECT)) { - switch (fieldName) { - case WriteableIngestDocument.DOC_FIELD -> result = new SimulateDocumentBaseResult( - WriteableIngestDocument.INGEST_DOC_PARSER.apply(parser, null).getIngestDocument() - ); - case "error" -> result = new 
SimulateDocumentBaseResult(ElasticsearchException.fromXContent(parser)); - default -> parser.skipChildren(); - } - } // else it is a value skip it - } - assert result != null; - return result; - }, new ParseField(Fields.DOCUMENTS)); - } - public SimulatePipelineResponse(StreamInput in) throws IOException { super(in); this.pipelineId = in.readOptionalString(); @@ -136,10 +78,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static SimulatePipelineResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - static final class Fields { static final String DOCUMENTS = "docs"; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index 1e5b2be8e4d29..f38dff3f8c83c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -118,7 +118,7 @@ protected void doExecute(Task task, SimulatePipelineRequest request, ActionListe logger.trace("forwarding request [{}] to ingest node [{}]", actionName, ingestNode); ActionListenerResponseHandler handler = new ActionListenerResponseHandler<>( listener, - SimulatePipelineAction.INSTANCE.getResponseReader(), + SimulatePipelineResponse::new, TransportResponseHandler.TRANSPORT_WORKER ); if (task == null) { diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 58ab8169ffb30..1da114adb34f6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -23,6 +23,7 @@ import 
org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.Releasable; @@ -66,6 +67,7 @@ abstract class AbstractSearchAsyncAction extends SearchPhase implements SearchPhaseContext { private static final float DEFAULT_INDEX_BOOST = 1.0f; private final Logger logger; + private final NamedWriteableRegistry namedWriteableRegistry; private final SearchTransportService searchTransportService; private final Executor executor; private final ActionListener listener; @@ -105,6 +107,7 @@ abstract class AbstractSearchAsyncAction exten AbstractSearchAsyncAction( String name, Logger logger, + NamedWriteableRegistry namedWriteableRegistry, SearchTransportService searchTransportService, BiFunction nodeIdToConnection, Map aliasFilter, @@ -121,6 +124,7 @@ abstract class AbstractSearchAsyncAction exten SearchResponse.Clusters clusters ) { super(name); + this.namedWriteableRegistry = namedWriteableRegistry; final List toSkipIterators = new ArrayList<>(); final List iterators = new ArrayList<>(); for (final SearchShardIterator iterator : shardsIts) { @@ -168,7 +172,7 @@ abstract class AbstractSearchAsyncAction exten this.results = resultConsumer; // register the release of the query consumer to free up the circuit breaker memory // at the end of the search - addReleasable(resultConsumer::decRef); + addReleasable(resultConsumer); this.clusters = clusters; } @@ -647,7 +651,7 @@ public OriginalIndices getOriginalIndices(int shardIndex) { public boolean isPartOfPointInTime(ShardSearchContextId contextId) { final PointInTimeBuilder pointInTimeBuilder = request.pointInTimeBuilder(); if (pointInTimeBuilder != null) { - return 
request.pointInTimeBuilder().getSearchContextId(searchTransportService.getNamedWriteableRegistry()).contains(contextId); + return request.pointInTimeBuilder().getSearchContextId(namedWriteableRegistry).contains(contextId); } else { return false; } diff --git a/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java b/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java index b4fd0107f731f..96f10d7d8a30e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java +++ b/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java @@ -9,11 +9,11 @@ package org.elasticsearch.action.search; import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.core.AbstractRefCounted; -import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.transport.LeakTracker; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Stream; /** @@ -22,7 +22,13 @@ class ArraySearchPhaseResults extends SearchPhaseResults { final AtomicArray results; - private final RefCounted refCounted = LeakTracker.wrap(AbstractRefCounted.of(this::doClose)); + private final AtomicBoolean closed = new AtomicBoolean(false); + + private final Releasable releasable = LeakTracker.wrap(() -> { + for (Result result : getAtomicArray().asList()) { + result.decRef(); + } + }); ArraySearchPhaseResults(int size) { super(size); @@ -41,12 +47,16 @@ void consumeResult(Result result, Runnable next) { next.run(); } - protected void doClose() { - for (Result result : getAtomicArray().asList()) { - result.decRef(); + @Override + public final void close() { + if (closed.compareAndSet(false, true)) { + releasable.close(); + doClose(); } } + protected void doClose() {} + boolean hasResult(int shardIndex) { return results.get(shardIndex) != null; } @@ -55,24 +65,4 @@ boolean 
hasResult(int shardIndex) { AtomicArray getAtomicArray() { return results; } - - @Override - public void incRef() { - refCounted.incRef(); - } - - @Override - public boolean tryIncRef() { - return refCounted.tryIncRef(); - } - - @Override - public boolean decRef() { - return refCounted.decRef(); - } - - @Override - public boolean hasReferences() { - return refCounted.hasReferences(); - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index 9900ee9d824ae..52f41179795d6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -482,24 +482,7 @@ Stream getSuccessfulResults() { } @Override - public void incRef() { - - } - - @Override - public boolean tryIncRef() { - return false; - } - - @Override - public boolean decRef() { - return false; - } - - @Override - public boolean hasReferences() { - return false; - } + public void close() {} } private GroupShardsIterator getIterator( diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java index 8b1116951df82..3d00d18565756 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java @@ -12,33 +12,19 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import 
org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class ClearScrollResponse extends ActionResponse implements ToXContentObject { - private static final ParseField SUCCEEDED = new ParseField("succeeded"); - private static final ParseField NUMFREED = new ParseField("num_freed"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "clear_scroll", - true, - a -> new ClosePointInTimeResponse((boolean) a[0], (int) a[1]) - ); - static { - PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), SUCCEEDED, ObjectParser.ValueType.BOOLEAN); - PARSER.declareField(constructorArg(), (parser, context) -> parser.intValue(), NUMFREED, ObjectParser.ValueType.INT); - } + public static final ParseField SUCCEEDED = new ParseField("succeeded"); + public static final ParseField NUMFREED = new ParseField("num_freed"); private final boolean succeeded; private final int numFreed; @@ -82,13 +68,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Parse the clear scroll response body into a new {@link ClearScrollResponse} object - */ - public static ClosePointInTimeResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(succeeded); diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java index 13972ea2bf64a..2c4cb31584323 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -104,20 +104,5 @@ AtomicArray getAtomicArray() { } @Override - public void incRef() {} - - @Override - public boolean tryIncRef() { - return true; - } - - @Override - public boolean decRef() { - return true; - } - - @Override - public boolean hasReferences() { - return false; - } + public void close() {} } diff --git a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java index 3a12b72570caf..0e6830dcfab0e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java @@ -25,7 +25,6 @@ final class CountedCollector { CountedCollector(SearchPhaseResults resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { this.resultConsumer = resultConsumer; - resultConsumer.incRef(); this.counter = new CountDown(expectedOps); this.onFinish = onFinish; this.context = context; @@ -38,11 +37,7 @@ final class CountedCollector { void countDown() { assert counter.isCountedDown() == false : "more operations executed than specified"; if (counter.countDown()) { - try { - onFinish.run(); - } finally { - resultConsumer.decRef(); - } + onFinish.run(); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 54408cd560314..0c9d6ba12a27a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -66,7 +66,7 @@ final class DfsQueryPhase extends SearchPhase { // register the release of the query consumer to free up the circuit breaker memory // at the end of the search - context.addReleasable(queryResult::decRef); + context.addReleasable(queryResult); } 
@Override @@ -151,7 +151,11 @@ ShardSearchRequest rewriteShardSearchRequest(ShardSearchRequest request) { } scoreDocs.sort(Comparator.comparingInt(scoreDoc -> scoreDoc.doc)); String nestedPath = dfsKnnResults.getNestedPath(); - QueryBuilder query = new KnnScoreDocQueryBuilder(scoreDocs.toArray(new ScoreDoc[0])); + QueryBuilder query = new KnnScoreDocQueryBuilder( + scoreDocs.toArray(new ScoreDoc[0]), + source.knnSearch().get(i).getField(), + source.knnSearch().get(i).getQueryVector() + ).boost(source.knnSearch().get(i).boost()); if (nestedPath != null) { query = new NestedQueryBuilder(nestedPath, query, ScoreMode.Max).innerHit(source.knnSearch().get(i).innerHit()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 00e2b41fde3da..7741c1483f69a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -96,6 +96,7 @@ public void run() { hit.setInnerHits(Maps.newMapWithExpectedSize(innerHitBuilders.size())); } hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); + innerHits.mustIncRef(); } } onPhaseDone(); diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 11528f8e1521f..1f06158951392 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -38,11 +38,16 @@ final class FetchSearchPhase extends SearchPhase { private final AggregatedDfs aggregatedDfs; FetchSearchPhase(SearchPhaseResults resultConsumer, AggregatedDfs aggregatedDfs, SearchPhaseContext context) { - this(resultConsumer, aggregatedDfs, context, (response, queryPhaseResults) -> { - response.mustIncRef(); - 
context.addReleasable(response::decRef); - return new ExpandSearchPhase(context, response.hits, () -> new FetchLookupFieldsPhase(context, response, queryPhaseResults)); - }); + this( + resultConsumer, + aggregatedDfs, + context, + (response, queryPhaseResults) -> new ExpandSearchPhase( + context, + response.hits, + () -> new FetchLookupFieldsPhase(context, response, queryPhaseResults) + ) + ); } FetchSearchPhase( @@ -61,7 +66,7 @@ final class FetchSearchPhase extends SearchPhase { ); } this.fetchResults = new ArraySearchPhaseResults<>(resultConsumer.getNumShards()); - context.addReleasable(fetchResults::decRef); + context.addReleasable(fetchResults); this.queryResults = resultConsumer.getAtomicArray(); this.aggregatedDfs = aggregatedDfs; this.nextPhaseFactory = nextPhaseFactory; @@ -225,10 +230,8 @@ private void moveToNextPhase( AtomicArray fetchResultsArr ) { var resp = SearchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr); - try { - context.executeNextPhase(this, nextPhaseFactory.apply(resp, queryResults)); - } finally { - resp.decRef(); - } + context.addReleasable(resp::decRef); + fetchResults.close(); + context.executeNextPhase(this, nextPhaseFactory.apply(resp, queryResults)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java index 39813a883c428..874437311d086 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java @@ -54,7 +54,7 @@ public OpenPointInTimeRequest(StreamInput in) throws IOException { this.keepAlive = in.readTimeValue(); this.routing = in.readOptionalString(); this.preference = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { 
this.maxConcurrentShardRequests = in.readVInt(); } if (in.getTransportVersion().onOrAfter(TransportVersions.PIT_WITH_INDEX_FILTER)) { @@ -70,7 +70,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeTimeValue(keepAlive); out.writeOptionalString(routing); out.writeOptionalString(preference); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeVInt(maxConcurrentShardRequests); } if (out.getTransportVersion().onOrAfter(TransportVersions.PIT_WITH_INDEX_FILTER)) { diff --git a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java index b7b113601560b..34ee0fc146aa5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java @@ -105,11 +105,7 @@ public QueryPhaseResultConsumer( @Override protected void doClose() { - try { - super.doClose(); - } finally { - pendingMerges.close(); - } + pendingMerges.close(); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 68d1bec590318..fcc848384866a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import 
org.elasticsearch.search.dfs.AggregatedDfs; @@ -31,24 +32,26 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction private final SearchProgressListener progressListener; SearchDfsQueryThenFetchAsyncAction( - final Logger logger, - final SearchTransportService searchTransportService, - final BiFunction nodeIdToConnection, - final Map aliasFilter, - final Map concreteIndexBoosts, - final Executor executor, - final SearchPhaseResults queryPhaseResultConsumer, - final SearchRequest request, - final ActionListener listener, - final GroupShardsIterator shardsIts, - final TransportSearchAction.SearchTimeProvider timeProvider, - final ClusterState clusterState, - final SearchTask task, + Logger logger, + NamedWriteableRegistry namedWriteableRegistry, + SearchTransportService searchTransportService, + BiFunction nodeIdToConnection, + Map aliasFilter, + Map concreteIndexBoosts, + Executor executor, + SearchPhaseResults queryPhaseResultConsumer, + SearchRequest request, + ActionListener listener, + GroupShardsIterator shardsIts, + TransportSearchAction.SearchTimeProvider timeProvider, + ClusterState clusterState, + SearchTask task, SearchResponse.Clusters clusters ) { super( "dfs", logger, + namedWriteableRegistry, searchTransportService, nodeIdToConnection, aliasFilter, @@ -65,7 +68,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction clusters ); this.queryPhaseResultConsumer = queryPhaseResultConsumer; - addReleasable(queryPhaseResultConsumer::decRef); + addReleasable(queryPhaseResultConsumer); this.progressListener = task.getProgressListener(); // don't build the SearchShard list (can be expensive) if the SearchProgressListener won't use it if (progressListener != SearchProgressListener.NOOP) { @@ -92,7 +95,6 @@ protected SearchPhase getNextPhase(final SearchPhaseResults res final List dfsSearchResults = results.getAtomicArray().asList(); final AggregatedDfs aggregatedDfs = 
SearchPhaseController.aggregateDfs(dfsSearchResults); final List mergedKnnResults = SearchPhaseController.mergeKnnResults(getRequest(), dfsSearchResults); - queryPhaseResultConsumer.incRef(); return new DfsQueryPhase( dfsSearchResults, aggregatedDfs, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 5ffb9024d3ee1..6cfea93068a86 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -364,11 +364,15 @@ public static SearchResponseSections merge( } ScoreDoc[] sortedDocs = reducedQueryPhase.sortedTopDocs.scoreDocs; var fetchResults = fetchResultsArray.asList(); - SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, fetchResultsArray); - if (reducedQueryPhase.suggest != null && fetchResults.isEmpty() == false) { - mergeSuggest(reducedQueryPhase, fetchResultsArray, hits, sortedDocs); + final SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, fetchResultsArray); + try { + if (reducedQueryPhase.suggest != null && fetchResults.isEmpty() == false) { + mergeSuggest(reducedQueryPhase, fetchResultsArray, hits, sortedDocs); + } + return reducedQueryPhase.buildResponse(hits, fetchResults); + } finally { + hits.decRef(); } - return reducedQueryPhase.buildResponse(hits, fetchResults); } private static void mergeSuggest( @@ -462,6 +466,7 @@ private static SearchHits getHits( searchHit.score(shardDoc.score); } hits.add(searchHit); + searchHit.incRef(); } } return new SearchHits( diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java index 8347dc8e3dc80..022c3304bc865 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java @@ -129,15 +129,25 @@ protected void metadataToXContent(XContentBuilder builder, Params params) throws } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + protected XContentBuilder toXContent(XContentBuilder builder, Params params, int nestedLevel) throws IOException { Throwable ex = ExceptionsHelper.unwrapCause(this); if (ex != this) { - generateThrowableXContent(builder, params, this); + generateThrowableXContent(builder, params, this, nestedLevel); } else { // We don't have a cause when all shards failed, but we do have shards failures so we can "guess" a cause // (see {@link #getCause()}). Here, we use super.getCause() because we don't want the guessed exception to // be rendered twice (one in the "cause" field, one in "failed_shards") - innerToXContent(builder, params, this, getExceptionName(), getMessage(), getHeaders(), getMetadata(), super.getCause()); + innerToXContent( + builder, + params, + this, + getExceptionName(), + getMessage(), + getHeaders(), + getMetadata(), + super.getCause(), + nestedLevel + ); } return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java index 11b8e0a0792a3..28606ecc09f90 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java @@ -9,7 +9,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; import org.elasticsearch.search.SearchPhaseResult; import java.util.stream.Stream; @@ -17,7 +17,7 @@ /** * This class acts as a basic result collection that can be extended to do on-the-fly reduction or result 
processing */ -abstract class SearchPhaseResults implements RefCounted { +abstract class SearchPhaseResults implements Releasable { private final int numShards; SearchPhaseResults(int numShards) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java index 096f2606d3f02..f5d280a01257c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java @@ -104,6 +104,15 @@ protected void onFetchResult(int shardIndex) {} */ protected void onFetchFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) {} + /** + * Indicates that a cluster has finished a search operation. Used for CCS minimize_roundtrips=true only. + * + * @param clusterAlias alias of cluster that has finished a search operation and returned a SearchResponse. + * The cluster alias for the local cluster is RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY. 
+ * @param searchResponse SearchResponse from cluster 'clusterAlias' + */ + protected void onClusterResponseMinimizeRoundtrips(String clusterAlias, SearchResponse searchResponse) {} + final void notifyListShards( List shards, List skippedShards, @@ -167,6 +176,14 @@ final void notifyFetchFailure(int shardIndex, SearchShardTarget shardTarget, Exc } } + final void notifyClusterResponseMinimizeRoundtrips(String clusterAlias, SearchResponse searchResponse) { + try { + onClusterResponseMinimizeRoundtrips(clusterAlias, searchResponse); + } catch (Exception e) { + logger.warn(() -> "[" + clusterAlias + "] Failed to execute progress listener onResponseMinimizeRoundtrips", e); + } + } + static List buildSearchShards(List results) { return results.stream() .filter(Objects::nonNull) diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index 51d330f55aee1..3ad7c52567d14 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; @@ -37,17 +38,18 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction nodeIdToConnection, - final Map aliasFilter, - final Map concreteIndexBoosts, - final Executor executor, - final SearchPhaseResults resultConsumer, - final SearchRequest request, - final ActionListener listener, - final GroupShardsIterator shardsIts, - final 
TransportSearchAction.SearchTimeProvider timeProvider, + Logger logger, + NamedWriteableRegistry namedWriteableRegistry, + SearchTransportService searchTransportService, + BiFunction nodeIdToConnection, + Map aliasFilter, + Map concreteIndexBoosts, + Executor executor, + SearchPhaseResults resultConsumer, + SearchRequest request, + ActionListener listener, + GroupShardsIterator shardsIts, + TransportSearchAction.SearchTimeProvider timeProvider, ClusterState clusterState, SearchTask task, SearchResponse.Clusters clusters @@ -55,6 +57,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction toXContentChunked(ToXContent.Params params) { + assert hasReferences(); return Iterators.concat( ChunkedToXContentHelper.startObject(), this.innerToXContentChunked(params), @@ -416,7 +448,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); String currentFieldName = parser.currentName(); SearchHits hits = null; - Aggregations aggs = null; + InternalAggregations aggs = null; Suggest suggest = null; SearchProfileResults profile = null; boolean timedOut = false; @@ -452,8 +484,8 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE } else if (token == Token.START_OBJECT) { if (SearchHits.Fields.HITS.equals(currentFieldName)) { hits = SearchHits.fromXContent(parser); - } else if (Aggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { - aggs = Aggregations.fromXContent(parser); + } else if (InternalAggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { + aggs = InternalAggregations.fromXContent(parser); } else if (Suggest.NAME.equals(currentFieldName)) { suggest = Suggest.fromXContent(parser); } else if (SearchProfileResults.PROFILE_FIELD.equals(currentFieldName)) { @@ -493,6 +525,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE } } } + return new SearchResponse( hits, aggs, @@ -514,8 
+547,9 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE @Override public void writeTo(StreamOutput out) throws IOException { + assert hasReferences(); hits.writeTo(out); - out.writeOptionalWriteable((InternalAggregations) aggregations); + out.writeOptionalWriteable(aggregations); out.writeOptionalWriteable(suggest); out.writeBoolean(timedOut); out.writeOptionalBoolean(terminatedEarly); @@ -537,7 +571,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String toString() { - return Strings.toString(this); + return hasReferences() == false ? "SearchResponse[released]" : Strings.toString(this); } /** @@ -632,7 +666,7 @@ public Clusters(StreamInput in) throws IOException { this.total = in.readVInt(); int successfulTemp = in.readVInt(); int skippedTemp = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { List clusterList = in.readCollectionAsList(Cluster::new); if (clusterList.isEmpty()) { this.clusterInfo = Collections.emptyMap(); @@ -685,7 +719,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(total); out.writeVInt(successful); out.writeVInt(skipped); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { if (clusterInfo != null) { List clusterList = clusterInfo.values().stream().toList(); out.writeCollection(clusterList); @@ -1050,7 +1084,7 @@ public Cluster(StreamInput in) throws IOException { } this.timedOut = in.readBoolean(); this.failures = Collections.unmodifiableList(in.readCollectionAsList(ShardSearchFailure::readShardSearchFailure)); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_066)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { this.skipUnavailable = in.readBoolean(); } else { this.skipUnavailable = 
SKIP_UNAVAILABLE_DEFAULT; @@ -1155,7 +1189,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalLong(took == null ? null : took.millis()); out.writeBoolean(timedOut); out.writeCollection(failures); - if (out.getTransportVersion().onOrAfter(TransportVersions.SEARCH_RESP_SKIP_UNAVAILABLE_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { out.writeBoolean(skipUnavailable); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index 1b616b9f3bc87..ae8c749475c5d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -65,7 +65,7 @@ // TODO it may make sense to integrate the remote clusters responses as a shard response in the initial search phase and ignore hits coming // from the remote clusters in the fetch phase. This would be identical to the removed QueryAndFetch strategy except that only the remote // cluster response would have the fetch results. -final class SearchResponseMerger implements Releasable { +public final class SearchResponseMerger implements Releasable { final int from; final int size; final int trackTotalHitsUpTo; @@ -98,7 +98,7 @@ final class SearchResponseMerger implements Releasable { * Merges currently happen at once when all responses are available and {@link #getMergedResponse(Clusters)} )} is called. * That may change in the future as it's possible to introduce incremental merges as responses come in if necessary. 
*/ - void add(SearchResponse searchResponse) { + public void add(SearchResponse searchResponse) { assert searchResponse.getScrollId() == null : "merging scroll results is not supported"; searchResponse.mustIncRef(); searchResponses.add(searchResponse); @@ -109,10 +109,13 @@ int numResponses() { } /** - * Returns the merged response. To be called once all responses have been added through {@link #add(SearchResponse)} - * so that all responses are merged into a single one. + * Returns the merged response of all SearchResponses received so far. Can be called at any point, + * including when only some clusters have finished, in order to get "incremental" partial results. + * @param clusters The Clusters object for the search to report on the status of each cluster + * involved in the cross-cluster search + * @return merged response */ - SearchResponse getMergedResponse(Clusters clusters) { + public SearchResponse getMergedResponse(Clusters clusters) { // if the search is only across remote clusters, none of them are available, and all of them have skip_unavailable set to true, // we end up calling merge without anything to merge, we just return an empty search response if (searchResponses.size() == 0) { @@ -144,7 +147,7 @@ SearchResponse getMergedResponse(Clusters clusters) { profileResults.putAll(searchResponse.getProfileResults()); if (searchResponse.hasAggregations()) { - InternalAggregations internalAggs = (InternalAggregations) searchResponse.getAggregations(); + InternalAggregations internalAggs = searchResponse.getAggregations(); aggs.add(internalAggs); } @@ -201,33 +204,37 @@ SearchResponse getMergedResponse(Clusters clusters) { setTopDocsShardIndex(shards, topDocsList); TopDocs topDocs = mergeTopDocs(topDocsList, size, from); SearchHits mergedSearchHits = topDocsToSearchHits(topDocs, topDocsStats); - setSuggestShardIndex(shards, groupedSuggestions); - Suggest suggest = groupedSuggestions.isEmpty() ? 
null : new Suggest(Suggest.reduce(groupedSuggestions)); - InternalAggregations reducedAggs = aggs.isEmpty() - ? InternalAggregations.EMPTY - : InternalAggregations.topLevelReduce(aggs, aggReduceContextBuilder.forFinalReduction()); - ShardSearchFailure[] shardFailures = failures.toArray(ShardSearchFailure.EMPTY_ARRAY); - SearchProfileResults profileShardResults = profileResults.isEmpty() ? null : new SearchProfileResults(profileResults); - // make failures ordering consistent between ordinary search and CCS by looking at the shard they come from - Arrays.sort(shardFailures, FAILURES_COMPARATOR); - long tookInMillis = searchTimeProvider.buildTookInMillis(); - return new SearchResponse( - mergedSearchHits, - reducedAggs, - suggest, - topDocsStats.timedOut, - topDocsStats.terminatedEarly, - profileShardResults, - numReducePhases, - null, - totalShards, - successfulShards, - skippedShards, - tookInMillis, - shardFailures, - clusters, - null - ); + try { + setSuggestShardIndex(shards, groupedSuggestions); + Suggest suggest = groupedSuggestions.isEmpty() ? null : new Suggest(Suggest.reduce(groupedSuggestions)); + InternalAggregations reducedAggs = aggs.isEmpty() + ? InternalAggregations.EMPTY + : InternalAggregations.topLevelReduce(aggs, aggReduceContextBuilder.forFinalReduction()); + ShardSearchFailure[] shardFailures = failures.toArray(ShardSearchFailure.EMPTY_ARRAY); + SearchProfileResults profileShardResults = profileResults.isEmpty() ? 
null : new SearchProfileResults(profileResults); + // make failures ordering consistent between ordinary search and CCS by looking at the shard they come from + Arrays.sort(shardFailures, FAILURES_COMPARATOR); + long tookInMillis = searchTimeProvider.buildTookInMillis(); + return new SearchResponse( + mergedSearchHits, + reducedAggs, + suggest, + topDocsStats.timedOut, + topDocsStats.terminatedEarly, + profileShardResults, + numReducePhases, + null, + totalShards, + successfulShards, + skippedShards, + tookInMillis, + shardFailures, + clusters, + null + ); + } finally { + mergedSearchHits.decRef(); + } } private static final Comparator FAILURES_COMPARATOR = new Comparator() { @@ -373,6 +380,7 @@ private static SearchHits topDocsToSearchHits(TopDocs topDocs, TopDocsStats topD for (int i = 0; i < topDocs.scoreDocs.length; i++) { FieldDocAndSearchHit scoreDoc = (FieldDocAndSearchHit) topDocs.scoreDocs[i]; searchHits[i] = scoreDoc.searchHit; + scoreDoc.searchHit.mustIncRef(); } } SortField[] sortFields = null; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index 805ef033db27a..a3763bf101b15 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -11,7 +11,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; @@ -45,7 +45,7 @@ public class SearchResponseSections implements RefCounted { 1 ); protected final SearchHits hits; - 
protected final Aggregations aggregations; + protected final InternalAggregations aggregations; protected final Suggest suggest; protected final SearchProfileResults profileResults; protected final boolean timedOut; @@ -56,7 +56,7 @@ public class SearchResponseSections implements RefCounted { public SearchResponseSections( SearchHits hits, - Aggregations aggregations, + InternalAggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly, @@ -64,6 +64,7 @@ public SearchResponseSections( int numReducePhases ) { this.hits = hits; + hits.incRef(); this.aggregations = aggregations; this.suggest = suggest; this.profileResults = profileResults; @@ -73,7 +74,7 @@ public SearchResponseSections( refCounted = hits.getHits().length > 0 ? LeakTracker.wrap(new AbstractRefCounted() { @Override protected void closeInternal() { - // TODO: noop until hits are ref counted + hits.decRef(); } }) : ALWAYS_REFERENCED; } @@ -90,7 +91,7 @@ public final SearchHits hits() { return hits; } - public final Aggregations aggregations() { + public final InternalAggregations aggregations() { return aggregations; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTask.java b/server/src/main/java/org/elasticsearch/action/search/SearchTask.java index b7e8de3b97b03..3bf72313c4c21 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTask.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTask.java @@ -21,6 +21,7 @@ public class SearchTask extends CancellableTask { // generating description in a lazy way since source can be quite big private final Supplier descriptionSupplier; private SearchProgressListener progressListener = SearchProgressListener.NOOP; + private Supplier searchResponseMergerSupplier; // used for CCS minimize_roundtrips=true public SearchTask( long id, @@ -53,4 +54,19 @@ public final SearchProgressListener getProgressListener() { return progressListener; } + /** + * @return the Supplier of {@link 
SearchResponseMerger} attached to this task. Will be null + * for local-only search and cross-cluster searches with minimize_roundtrips=false. + */ + public Supplier getSearchResponseMergerSupplier() { + return searchResponseMergerSupplier; + } + + /** + * @param supplier Attach a Supplier of {@link SearchResponseMerger} to this task. + * For use with CCS minimize_roundtrips=true + */ + public void setSearchResponseMergerSupplier(Supplier supplier) { + this.searchResponseMergerSupplier = supplier; + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index b7cc61ad70e2f..9fb0c87c78eb7 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -19,7 +19,6 @@ import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -678,8 +677,4 @@ public void cancelSearchTask(SearchTask task, String reason) { // force the origin to execute the cancellation as a system user new OriginSettingClient(client, GetTaskAction.TASKS_ORIGIN).admin().cluster().cancelTasks(req, ActionListener.noop()); } - - public NamedWriteableRegistry getNamedWriteableRegistry() { - return client.getNamedWriteableRegistry(); - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index e1a6bb6c42b2e..65284d5d55585 100644 --- 
a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -22,7 +22,7 @@ public class TransportClearScrollAction extends HandledTransportAction TYPE = new ActionType<>(NAME, ClearScrollResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); private final ClusterService clusterService; private final SearchTransportService searchTransportService; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportClosePointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportClosePointInTimeAction.java index 338e63d6af2a6..f507b2e1136a8 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportClosePointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportClosePointInTimeAction.java @@ -23,10 +23,7 @@ public class TransportClosePointInTimeAction extends HandledTransportAction { - public static final ActionType TYPE = new ActionType<>( - "indices:data/read/close_point_in_time", - ClosePointInTimeResponse::new - ); + public static final ActionType TYPE = new ActionType<>("indices:data/read/close_point_in_time"); private final ClusterService clusterService; private final SearchTransportService searchTransportService; private final NamedWriteableRegistry namedWriteableRegistry; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index c81f3c3dc24c6..a2c41042871ba 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -36,7 +36,7 @@ public class TransportMultiSearchAction extends HandledTransportAction { public static final String NAME = "indices:data/read/msearch"; - public 
static final ActionType TYPE = new ActionType<>(NAME, MultiSearchResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportMultiSearchAction.class); private final int allocatedProcessors; private final ThreadPool threadPool; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 3b1093c207854..91784ba331857 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -54,13 +55,11 @@ public class TransportOpenPointInTimeAction extends HandledTransportAction TYPE = new ActionType<>( - "indices:data/read/open_point_in_time", - OpenPointInTimeResponse::new - ); + public static final ActionType TYPE = new ActionType<>("indices:data/read/open_point_in_time"); private final TransportSearchAction transportSearchAction; private final SearchTransportService searchTransportService; + private final NamedWriteableRegistry namedWriteableRegistry; private final TransportService transportService; private final SearchService searchService; @@ -70,13 +69,15 @@ public TransportOpenPointInTimeAction( SearchService searchService, ActionFilters actionFilters, TransportSearchAction transportSearchAction, - SearchTransportService searchTransportService + SearchTransportService searchTransportService, + 
NamedWriteableRegistry namedWriteableRegistry ) { super(TYPE.name(), transportService, actionFilters, OpenPointInTimeRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.transportService = transportService; this.transportSearchAction = transportSearchAction; this.searchService = searchService; this.searchTransportService = searchTransportService; + this.namedWriteableRegistry = namedWriteableRegistry; transportService.registerRequestHandler( OPEN_SHARD_READER_CONTEXT_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, @@ -131,6 +132,9 @@ public SearchPhase newSearchPhase( ThreadPool threadPool, SearchResponse.Clusters clusters ) { + // Note: remote shards are prefiltered via can match as part of search shards. They don't need additional pre-filtering and + // that is signaled to the local can match through the SearchShardIterator#prefiltered flag. Local shards do need to go + // through the local can match phase. if (SearchService.canRewriteToMatchNone(searchRequest.source())) { return new CanMatchPreFilterSearchPhase( logger, @@ -193,6 +197,7 @@ SearchPhase openPointInTimePhase( return new AbstractSearchAsyncAction<>( actionName, logger, + namedWriteableRegistry, searchTransportService, connectionLookup, aliasFilter, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 4e9aed5f643f2..d391bc198725b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import 
org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; @@ -24,7 +25,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -112,7 +112,8 @@ public class TransportSearchAction extends HandledTransportAction { public static final String NAME = "indices:data/read/search"; - public static final ActionType TYPE = new ActionType<>(NAME, SearchResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>(NAME, SearchResponse::new); private static final Logger logger = LogManager.getLogger(TransportSearchAction.class); private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportSearchAction.class); public static final String FROZEN_INDICES_DEPRECATION_MESSAGE = "Searching frozen indices [{}] is deprecated." 
@@ -362,6 +363,7 @@ void executeRequest( .notifyListShards(Collections.emptyList(), Collections.emptyList(), clusters, false, timeProvider); } ccsRemoteReduce( + task, parentTaskId, rewritten, localIndices, @@ -496,6 +498,7 @@ public static boolean shouldMinimizeRoundtrips(SearchRequest searchRequest) { * Handles ccs_minimize_roundtrips=true */ static void ccsRemoteReduce( + SearchTask task, TaskId parentTaskId, SearchRequest searchRequest, OriginalIndices localIndices, @@ -524,15 +527,10 @@ static void ccsRemoteReduce( timeProvider.absoluteStartMillis(), true ); - Client remoteClusterClient = remoteClusterService.getRemoteClusterClient( - threadPool, - clusterAlias, - remoteClientResponseExecutor - ); - remoteClusterClient.search(ccsSearchRequest, new ActionListener<>() { + var remoteClusterClient = remoteClusterService.getRemoteClusterClient(clusterAlias, remoteClientResponseExecutor); + remoteClusterClient.execute(TransportSearchAction.REMOTE_TYPE, ccsSearchRequest, new ActionListener<>() { @Override public void onResponse(SearchResponse searchResponse) { - // TODO: in CCS fail fast ticket we may need to fail the query if the cluster is marked as FAILED // overwrite the existing cluster entry with the updated one ccsClusterInfoUpdate(searchResponse, clusters, clusterAlias, skipUnavailable); Map profileResults = searchResponse.getProfileResults(); @@ -580,6 +578,9 @@ public void onFailure(Exception e) { timeProvider, aggReduceContextBuilder ); + task.setSearchResponseMergerSupplier( + () -> createSearchResponseMerger(searchRequest.source(), timeProvider, aggReduceContextBuilder) + ); final AtomicReference exceptions = new AtomicReference<>(); int totalClusters = remoteIndices.size() + (localIndices == null ? 
0 : 1); final CountDown countDown = new CountDown(totalClusters); @@ -602,14 +603,11 @@ public void onFailure(Exception e) { exceptions, searchResponseMerger, clusters, + task.getProgressListener(), listener ); - Client remoteClusterClient = remoteClusterService.getRemoteClusterClient( - threadPool, - clusterAlias, - remoteClientResponseExecutor - ); - remoteClusterClient.search(ccsSearchRequest, ccsListener); + final var remoteClusterClient = remoteClusterService.getRemoteClusterClient(clusterAlias, remoteClientResponseExecutor); + remoteClusterClient.execute(TransportSearchAction.REMOTE_TYPE, ccsSearchRequest, ccsListener); } if (localIndices != null) { ActionListener ccsListener = createCCSListener( @@ -619,6 +617,7 @@ public void onFailure(Exception e) { exceptions, searchResponseMerger, clusters, + task.getProgressListener(), listener ); SearchRequest ccsLocalSearchRequest = SearchRequest.subSearchRequest( @@ -710,7 +709,7 @@ Map createFinalResponse() { final String[] indices = entry.getValue().indices(); final Executor responseExecutor = transportService.getThreadPool().executor(ThreadPool.Names.SEARCH_COORDINATION); // TODO: support point-in-time - if (searchContext == null && connection.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (searchContext == null && connection.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { SearchShardsRequest searchShardsRequest = new SearchShardsRequest( indices, indicesOptions, @@ -759,6 +758,7 @@ private static ActionListener createCCSListener( AtomicReference exceptions, SearchResponseMerger searchResponseMerger, SearchResponse.Clusters clusters, + SearchProgressListener progressListener, ActionListener originalListener ) { return new CCSActionListener<>( @@ -771,9 +771,9 @@ private static ActionListener createCCSListener( ) { @Override void innerOnResponse(SearchResponse searchResponse) { - // TODO: in CCS fail fast ticket we may need to fail the query if the cluster gets marked as FAILED 
ccsClusterInfoUpdate(searchResponse, clusters, clusterAlias, skipUnavailable); searchResponseMerger.add(searchResponse); + progressListener.notifyClusterResponseMinimizeRoundtrips(clusterAlias, searchResponse); } @Override @@ -981,8 +981,12 @@ static List getRemoteShardsIteratorFromPointInTime( for (Map.Entry entry : searchShardsResponses.entrySet()) { for (SearchShardsGroup group : entry.getValue().getGroups()) { final ShardId shardId = group.shardId(); - final String clusterAlias = entry.getKey(); final SearchContextIdForNode perNode = searchContextId.shards().get(shardId); + if (perNode == null) { + // the shard was skipped after can match, hence it is not even part of the pit id + continue; + } + final String clusterAlias = entry.getKey(); assert clusterAlias.equals(perNode.getClusterAlias()) : clusterAlias + " != " + perNode.getClusterAlias(); final List targetNodes = new ArrayList<>(group.allocatedNodes().size()); targetNodes.add(perNode.getNode()); @@ -1011,9 +1015,26 @@ static List getRemoteShardsIteratorFromPointInTime( remoteShardIterators.add(shardIterator); } } + assert checkAllRemotePITShardsWereReturnedBySearchShards(searchContextId.shards(), searchShardsResponses) + : "search shards did not return remote shards that PIT included: " + searchContextId.shards(); return remoteShardIterators; } + private static boolean checkAllRemotePITShardsWereReturnedBySearchShards( + Map searchContextIdShards, + Map searchShardsResponses + ) { + Map searchContextIdForNodeMap = new HashMap<>(searchContextIdShards); + for (SearchShardsResponse searchShardsResponse : searchShardsResponses.values()) { + for (SearchShardsGroup group : searchShardsResponse.getGroups()) { + searchContextIdForNodeMap.remove(group.shardId()); + } + } + return searchContextIdForNodeMap.values() + .stream() + .allMatch(searchContextIdForNode -> searchContextIdForNode.getClusterAlias() == null); + } + Index[] resolveLocalIndices(OriginalIndices localIndices, ClusterState clusterState, 
SearchTimeProvider timeProvider) { if (localIndices == null) { return Index.EMPTY_ARRAY; // don't search on any local index (happens when only remote indices were specified) @@ -1307,6 +1328,7 @@ public SearchPhase newSearchPhase( if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) { return new SearchDfsQueryThenFetchAsyncAction( logger, + namedWriteableRegistry, searchTransportService, connectionLookup, aliasFilter, @@ -1325,6 +1347,7 @@ public SearchPhase newSearchPhase( assert searchRequest.searchType() == QUERY_THEN_FETCH : searchRequest.searchType(); return new SearchQueryThenFetchAsyncAction( logger, + namedWriteableRegistry, searchTransportService, connectionLookup, aliasFilter, @@ -1471,7 +1494,6 @@ public final void onFailure(Exception e) { if (cluster != null) { ccsClusterInfoUpdate(f, clusters, clusterAlias, true); } - // skippedClusters.incrementAndGet(); } else { if (cluster != null) { ccsClusterInfoUpdate(f, clusters, clusterAlias, false); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 0a2b496a5eb8a..27086366283f9 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -27,7 +28,11 @@ import static org.elasticsearch.action.search.TransportSearchHelper.parseScrollId; public class TransportSearchScrollAction extends HandledTransportAction { - public static final ActionType TYPE = new 
ActionType<>("indices:data/read/scroll", SearchResponse::new); + public static final ActionType TYPE = new ActionType<>("indices:data/read/scroll"); + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + TYPE.name(), + SearchResponse::new + ); private static final Logger logger = LogManager.getLogger(TransportSearchScrollAction.class); private final ClusterService clusterService; private final SearchTransportService searchTransportService; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index 0d1672c77cbed..60efb910a5269 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; @@ -44,7 +45,11 @@ public class TransportSearchShardsAction extends HandledTransportAction { public static final String NAME = "indices:admin/search/search_shards"; - public static final ActionType TYPE = new ActionType<>(NAME, SearchShardsResponse::new); + public static final ActionType TYPE = new ActionType<>(NAME); + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + SearchShardsResponse::new + ); private final TransportService transportService; private final TransportSearchAction transportSearchAction; private final SearchService searchService; diff --git a/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java 
b/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java index c746bc9acf2a1..a4836ca322035 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java @@ -28,7 +28,6 @@ public ChannelActionListener(TransportChannel channel) { @Override public void onResponse(Response response) { - response.incRef(); // acquire reference that will be released by channel.sendResponse below ActionListener.run(this, l -> l.channel.sendResponse(response)); } diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index 688f0c4264236..65739b01422a5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -34,11 +34,12 @@ * An {@link ActionListener} to which other {@link ActionListener} instances can subscribe, such that when this listener is completed it * fans-out its result to the subscribed listeners. *

    - * Similar to {@link ListenableActionFuture} and {@link ListenableFuture} except for its handling of exceptions: if this listener is - * completed exceptionally then the exception is passed to subscribed listeners without modification. + * Exceptions are passed to subscribed listeners without modification. {@link ListenableActionFuture} and {@link ListenableFuture} are child + * classes that provide additional exception handling. *

    - * Often this will be used to chain together a sequence of async actions, similarly to {@link CompletionStage} (without the - * {@code catch (Throwable t)}), such as in the following example: + * A sequence of async steps can be chained together using a series of {@link SubscribableListener}s, similar to {@link CompletionStage} + * (without the {@code catch (Throwable t)}). Listeners can be created for each step, where the next step subscribes to the result of the + * previous, using utilities like {@link #andThen(CheckedBiConsumer)}. The following example demonstrates how this might be used: *

    {@code
      * private void exampleAsyncMethod(String request, List items, ActionListener finalListener) {
      *     SubscribableListener
    diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
    index 35f1b645293bd..222941981f05a 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
    @@ -14,7 +14,6 @@
     import org.elasticsearch.action.ActionRequest;
     import org.elasticsearch.action.ActionRequestValidationException;
     import org.elasticsearch.action.ActionResponse;
    -import org.elasticsearch.common.io.stream.Writeable;
     import org.elasticsearch.core.Releasable;
     import org.elasticsearch.core.Releasables;
     import org.elasticsearch.tasks.Task;
    @@ -139,7 +138,7 @@ public void onFailure(Exception e) {
     
         /**
          * A method to use as a placeholder in implementations of {@link TransportAction} which only ever run on the local node, and therefore
    -     * do not need to serialize or deserialize any messages. See also {@link Writeable.Reader#localOnly()}.
    +     * do not need to serialize or deserialize any messages.
          */
         // TODO remove this when https://github.com/elastic/elasticsearch/issues/100111 is resolved
         public static  T localOnly() {
    diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java
    index 52b4c00175fa8..b69b87190f2a7 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java
    @@ -42,7 +42,7 @@ public class BaseBroadcastResponse extends ActionResponse {
         private final DefaultShardOperationFailedException[] shardFailures;
     
         @SuppressWarnings("unchecked")
    -    protected static  void declareBroadcastFields(ConstructingObjectParser PARSER) {
    +    public static  void declareBroadcastFields(ConstructingObjectParser PARSER) {
             ConstructingObjectParser shardsParser = new ConstructingObjectParser<>(
                 "_shards",
                 true,
    diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java
    index bf8376cfc5481..312a9843c9e2b 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java
    @@ -46,7 +46,7 @@ public BroadcastUnpromotableRequest(StreamInput in) throws IOException {
             indexShardRoutingTable = null;
             shardId = new ShardId(in);
             indices = new String[] { shardId.getIndex().getName() };
    -        failShardOnError = in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) && in.readBoolean();
    +        failShardOnError = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) && in.readBoolean();
         }
     
         public BroadcastUnpromotableRequest(IndexShardRoutingTable indexShardRoutingTable) {
    diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java
    index b9222aa8c53bc..c32ad9cf41a74 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java
    @@ -7,6 +7,7 @@
      */
     package org.elasticsearch.action.support.master;
     
    +import org.elasticsearch.action.ActionRequestValidationException;
     import org.elasticsearch.cluster.ack.AckedRequest;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
    @@ -82,4 +83,21 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeTimeValue(timeout);
         }
     
    +    @Override
    +    public ActionRequestValidationException validate() {
    +        return null;
    +    }
    +
    +    /**
    +     * AcknowledgedRequest that does not have any additional fields. Should be used instead of implementing noop children for
    +     * AcknowledgedRequest.
    +     */
    +    public static final class Plain extends AcknowledgedRequest {
    +
    +        public Plain(StreamInput in) throws IOException {
    +            super(in);
    +        }
    +
    +        public Plain() {}
    +    }
     }
    diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
    index 4fb243891709b..48036239793c0 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
    @@ -230,7 +230,12 @@ protected void resolveRequest(NodesRequest request, ClusterState clusterState) {
         class NodeTransportHandler implements TransportRequestHandler {
             @Override
             public void messageReceived(NodeRequest request, TransportChannel channel, Task task) throws Exception {
    -            channel.sendResponse(nodeOperation(request, task));
    +            final var nodeResponse = nodeOperation(request, task);
    +            try {
    +                channel.sendResponse(nodeResponse);
    +            } finally {
    +                nodeResponse.decRef();
    +            }
             }
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
    index 68cc02a613aae..04ba462523f5f 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
    @@ -466,14 +466,13 @@ private void decPendingAndFinishIfNeeded() {
     
         private void finish() {
             if (finished.compareAndSet(false, true)) {
    -            final ReplicationResponse.ShardInfo.Failure[] failuresArray;
    -            if (shardReplicaFailures.isEmpty()) {
    -                failuresArray = ReplicationResponse.NO_FAILURES;
    -            } else {
    -                failuresArray = new ReplicationResponse.ShardInfo.Failure[shardReplicaFailures.size()];
    -                shardReplicaFailures.toArray(failuresArray);
    -            }
    -            primaryResult.setShardInfo(new ReplicationResponse.ShardInfo(totalShards.get(), successfulShards.get(), failuresArray));
    +            primaryResult.setShardInfo(
    +                ReplicationResponse.ShardInfo.of(
    +                    totalShards.get(),
    +                    successfulShards.get(),
    +                    shardReplicaFailures.toArray(ReplicationResponse.NO_FAILURES)
    +                )
    +            );
                 resultListener.onResponse(primaryResult);
             }
         }
    diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java
    index 209809a4294d9..06e9018c8983d 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java
    @@ -27,6 +27,7 @@
     import java.util.ArrayList;
     import java.util.Arrays;
     import java.util.List;
    +import java.util.stream.IntStream;
     
     import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
     
    @@ -43,7 +44,7 @@ public ReplicationResponse() {}
     
         public ReplicationResponse(StreamInput in) throws IOException {
             super(in);
    -        shardInfo = new ReplicationResponse.ShardInfo(in);
    +        shardInfo = ReplicationResponse.ShardInfo.readFrom(in);
         }
     
         @Override
    @@ -61,30 +62,61 @@ public void setShardInfo(ShardInfo shardInfo) {
     
         public static class ShardInfo implements Writeable, ToXContentObject {
     
    +        // cache the most commonly used instances where all shard operations succeeded to save allocations on the transport layer
    +        private static final ShardInfo[] COMMON_INSTANCES = IntStream.range(0, 10)
    +            .mapToObj(i -> new ShardInfo(i, i, NO_FAILURES))
    +            .toArray(ShardInfo[]::new);
    +
    +        public static final ShardInfo EMPTY = COMMON_INSTANCES[0];
    +
             private static final String TOTAL = "total";
             private static final String SUCCESSFUL = "successful";
             private static final String FAILED = "failed";
             private static final String FAILURES = "failures";
     
    -        private int total;
    -        private int successful;
    -        private Failure[] failures = ReplicationResponse.NO_FAILURES;
    +        private final int total;
    +        private final int successful;
    +        private final Failure[] failures;
     
    -        public ShardInfo() {}
    -
    -        public ShardInfo(StreamInput in) throws IOException {
    -            total = in.readVInt();
    -            successful = in.readVInt();
    +        public static ShardInfo readFrom(StreamInput in) throws IOException {
    +            int total = in.readVInt();
    +            int successful = in.readVInt();
                 int size = in.readVInt();
    +
    +            final Failure[] failures;
                 if (size > 0) {
                     failures = new Failure[size];
                     for (int i = 0; i < size; i++) {
                         failures[i] = new Failure(in);
                     }
    +            } else {
    +                failures = NO_FAILURES;
    +            }
    +            return ShardInfo.of(total, successful, failures);
    +        }
    +
    +        public static ShardInfo allSuccessful(int total) {
    +            if (total < COMMON_INSTANCES.length) {
    +                return COMMON_INSTANCES[total];
                 }
    +            return new ShardInfo(total, total, NO_FAILURES);
    +        }
    +
    +        public static ShardInfo of(int total, int successful) {
    +            if (total == successful) {
    +                return allSuccessful(total);
    +            }
    +            return new ShardInfo(total, successful, ReplicationResponse.NO_FAILURES);
    +        }
    +
    +        public static ShardInfo of(int total, int successful, Failure[] failures) {
    +            if (failures.length == 0) {
    +                return of(total, successful);
    +            }
    +            return new ShardInfo(total, successful, failures);
             }
     
    -        public ShardInfo(int total, int successful, Failure... failures) {
    +        private ShardInfo(int total, int successful, Failure[] failures) {
                 assert total >= 0 && successful >= 0;
                 this.total = total;
                 this.successful = successful;
    @@ -188,7 +220,7 @@ public static ShardInfo fromXContent(XContentParser parser) throws IOException {
                 }
                 Failure[] failures = ReplicationResponse.NO_FAILURES;
                 if (failuresList != null) {
    -                failures = failuresList.toArray(new Failure[failuresList.size()]);
    +                failures = failuresList.toArray(ReplicationResponse.NO_FAILURES);
                 }
                 return new ShardInfo(total, successful, failures);
             }
    diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
    index 0abe7ad678dc5..a935c0e4e06bb 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
    @@ -961,7 +961,7 @@ public Response read(StreamInput in) throws IOException {
                     }
     
                     @Override
    -                public Executor executor(ThreadPool threadPool) {
    +                public Executor executor() {
                         return TransportResponseHandler.TRANSPORT_WORKER;
                     }
     
    diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
    index e94a619c7785e..0ee6af717a1cb 100644
    --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
    @@ -134,7 +134,7 @@ private static  void addAllSynchronized(List allResults, Collection res
     
                 @Override
                 protected void onItemFailure(String nodeId, Exception e) {
    -                logger.debug(() -> Strings.format("failed to execute on node [{}]", nodeId), e);
    +                logger.debug(() -> Strings.format("failed to execute on node [%s]", nodeId), e);
                     synchronized (failedNodeExceptions) {
                         failedNodeExceptions.add(new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", e));
                     }
    diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/AbstractSynonymsPagedResultAction.java b/server/src/main/java/org/elasticsearch/action/synonyms/AbstractSynonymsPagedResultAction.java
    index ba2ff244d7b21..f1ebafe28be7c 100644
    --- a/server/src/main/java/org/elasticsearch/action/synonyms/AbstractSynonymsPagedResultAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/synonyms/AbstractSynonymsPagedResultAction.java
    @@ -35,7 +35,7 @@
     public abstract class AbstractSynonymsPagedResultAction extends ActionType {
     
         public AbstractSynonymsPagedResultAction(String name, Writeable.Reader reader) {
    -        super(name, reader);
    +        super(name);
         }
     
         /**
    diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleAction.java b/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleAction.java
    index 17e25aea6952f..cc98da3648709 100644
    --- a/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleAction.java
    @@ -25,7 +25,7 @@ public class DeleteSynonymRuleAction extends ActionType {
         public static final String NAME = "cluster:admin/synonym_rules/delete";
     
         public DeleteSynonymRuleAction() {
    -        super(NAME, SynonymUpdateResponse::new);
    +        super(NAME);
         }
     
         public static class Request extends ActionRequest {
    diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java b/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java
    index 11e99dab6066b..52bbb40eb52f8 100644
    --- a/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java
    @@ -25,7 +25,7 @@ public class DeleteSynonymsAction extends ActionType {
         public static final String NAME = "cluster:admin/synonyms/delete";
     
         public DeleteSynonymsAction() {
    -        super(NAME, AcknowledgedResponse::readFrom);
    +        super(NAME);
         }
     
         public static class Request extends ActionRequest {
    diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/GetSynonymRuleAction.java b/server/src/main/java/org/elasticsearch/action/synonyms/GetSynonymRuleAction.java
    index dee27849adc30..6a1c3e17a70ad 100644
    --- a/server/src/main/java/org/elasticsearch/action/synonyms/GetSynonymRuleAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/synonyms/GetSynonymRuleAction.java
    @@ -29,7 +29,7 @@ public class GetSynonymRuleAction extends ActionType {
         public static final String NAME = "cluster:admin/synonym_rules/put";
     
         public PutSynonymRuleAction() {
    -        super(NAME, SynonymUpdateResponse::new);
    +        super(NAME);
         }
     
         public static class Request extends ActionRequest {
    diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/PutSynonymsAction.java b/server/src/main/java/org/elasticsearch/action/synonyms/PutSynonymsAction.java
    index 6662d04dd55ab..6bb6f484e54d3 100644
    --- a/server/src/main/java/org/elasticsearch/action/synonyms/PutSynonymsAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/synonyms/PutSynonymsAction.java
    @@ -36,7 +36,7 @@ public class PutSynonymsAction extends ActionType {
         public static final String NAME = "cluster:admin/synonyms/put";
     
         public PutSynonymsAction() {
    -        super(NAME, SynonymUpdateResponse::new);
    +        super(NAME);
         }
     
         public static class Request extends ActionRequest {
    diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java
    index b8a62f9f0509b..0f5917cdcb259 100644
    --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java
    @@ -16,6 +16,6 @@ public class MultiTermVectorsAction extends ActionType
         public static final String NAME = "indices:data/read/mtv";
     
         private MultiTermVectorsAction() {
    -        super(NAME, MultiTermVectorsResponse::new);
    +        super(NAME);
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java
    index 4fa2bc84d3ff2..492673c18245a 100644
    --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java
    @@ -16,7 +16,7 @@ public class TermVectorsAction extends ActionType {
         public static final String NAME = "indices:data/read/tv";
     
         private TermVectorsAction() {
    -        super(NAME, TermVectorsResponse::new);
    +        super(NAME);
         }
     
     }
    diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
    index e7de3d7e70cfc..d0277e8ce8c80 100644
    --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
    @@ -37,7 +37,7 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
         private final IndicesService indicesService;
     
         private static final String ACTION_NAME = MultiTermVectorsAction.NAME + "[shard]";
    -    public static final ActionType TYPE = new ActionType<>(ACTION_NAME, MultiTermVectorsShardResponse::new);
    +    public static final ActionType TYPE = new ActionType<>(ACTION_NAME);
     
         @Inject
         public TransportShardMultiTermsVectorAction(
    diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
    index c9deec8c504a1..63ae56bfbd047 100644
    --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
    +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
    @@ -13,12 +13,12 @@
     import org.elasticsearch.action.ActionRunnable;
     import org.elasticsearch.action.ActionType;
     import org.elasticsearch.action.DocWriteRequest;
    +import org.elasticsearch.action.DocWriteResponse;
     import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
     import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
     import org.elasticsearch.action.delete.DeleteRequest;
     import org.elasticsearch.action.delete.DeleteResponse;
     import org.elasticsearch.action.index.IndexRequest;
    -import org.elasticsearch.action.index.IndexResponse;
     import org.elasticsearch.action.support.ActionFilters;
     import org.elasticsearch.action.support.AutoCreateIndex;
     import org.elasticsearch.action.support.TransportActions;
    @@ -60,7 +60,7 @@
     public class TransportUpdateAction extends TransportInstanceSingleOperationAction {
     
         public static final String NAME = "indices:data/write/update";
    -    public static final ActionType TYPE = new ActionType<>(NAME, UpdateResponse::new);
    +    public static final ActionType TYPE = new ActionType<>(NAME);
         private final AutoCreateIndex autoCreateIndex;
         private final UpdateHelper updateHelper;
         private final IndicesService indicesService;
    @@ -189,7 +189,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener<
                     final BytesReference upsertSourceBytes = upsertRequest.source();
                     client.bulk(
                         toSingleItemBulkRequest(upsertRequest),
    -                    unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> {
    +                    unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> {
                             UpdateResponse update = new UpdateResponse(
                                 response.getShardInfo(),
                                 response.getShardId(),
    @@ -231,7 +231,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener<
                     final BytesReference indexSourceBytes = indexRequest.source();
                     client.bulk(
                         toSingleItemBulkRequest(indexRequest),
    -                    unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> {
    +                    unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> {
                             UpdateResponse update = new UpdateResponse(
                                 response.getShardInfo(),
                                 response.getShardId(),
    diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
    index 600790b2fd841..d7b1ea46b77b0 100644
    --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
    +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
    @@ -57,7 +57,7 @@ public class UpdateRequest extends InstanceShardOperationRequest
     
         private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(UpdateRequest.class);
     
    -    private static ObjectParser PARSER;
    +    private static final ObjectParser PARSER;
     
         private static final ParseField SCRIPT_FIELD = new ParseField("script");
         private static final ParseField SCRIPTED_UPSERT_FIELD = new ParseField("scripted_upsert");
    @@ -183,6 +183,8 @@ public ActionRequestValidationException validate() {
     
             validationException = DocWriteRequest.validateSeqNoBasedCASParams(this, validationException);
     
    +        validationException = DocWriteRequest.validateDocIdLength(id, validationException);
    +
             if (ifSeqNo != UNASSIGNED_SEQ_NO) {
                 if (retryOnConflict > 0) {
                     validationException = addValidationError("compare and write operations can not be retried", validationException);
    @@ -831,6 +833,12 @@ public boolean isRequireAlias() {
             return requireAlias;
         }
     
    +    @Override
    +    public boolean isRequireDataStream() {
    +        // Always false because data streams cannot accept update operations
    +        return false;
    +    }
    +
         @Override
         public void process(IndexRouting indexRouting) {
             // Nothing to do
    diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java
    index c7bd513ff84d4..88bed844558f2 100644
    --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java
    +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java
    @@ -348,4 +348,11 @@ public UpdateRequestBuilder setScriptedUpsert(boolean scriptedUpsert) {
             return this;
         }
     
    +    /**
    +     * Sets the require_alias flag
    +     */
    +    public UpdateRequestBuilder setRequireAlias(boolean requireAlias) {
    +        request.setRequireAlias(requireAlias);
    +        return this;
    +    }
     }
    diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
    index b5368c2dc720e..c6454dd6cedd8 100644
    --- a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
    +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
    @@ -46,7 +46,7 @@ public UpdateResponse(StreamInput in) throws IOException {
          * For example: update script with operation set to none
          */
         public UpdateResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, Result result) {
    -        this(new ShardInfo(0, 0), shardId, id, seqNo, primaryTerm, version, result);
    +        this(ShardInfo.EMPTY, shardId, id, seqNo, primaryTerm, version, result);
         }
     
         @SuppressWarnings("this-escape")
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/Client.java b/server/src/main/java/org/elasticsearch/client/internal/Client.java
    index 5ae3870338c35..668168764a4d0 100644
    --- a/server/src/main/java/org/elasticsearch/client/internal/Client.java
    +++ b/server/src/main/java/org/elasticsearch/client/internal/Client.java
    @@ -413,7 +413,7 @@ public interface Client extends ElasticsearchClient {
          * @throws IllegalArgumentException if the given clusterAlias doesn't exist
          * @throws UnsupportedOperationException if this functionality is not available on this client.
          */
    -    default Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    +    default RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
             throw new UnsupportedOperationException("this client doesn't support remote cluster connections");
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java
    index a8365a62c9e58..8e9977696bc18 100644
    --- a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java
    +++ b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java
    @@ -24,7 +24,6 @@
     import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder;
    -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
     import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest;
     import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequestBuilder;
     import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse;
    @@ -293,7 +292,7 @@ public interface ClusterAdminClient extends ElasticsearchClient {
          * @param request The nodes tasks request
          * @return The result future
          */
    -    ActionFuture cancelTasks(CancelTasksRequest request);
    +    ActionFuture cancelTasks(CancelTasksRequest request);
     
         /**
          * Cancel active tasks
    @@ -301,7 +300,7 @@ public interface ClusterAdminClient extends ElasticsearchClient {
          * @param request  The nodes tasks request
          * @param listener A listener to be notified with a result
          */
    -    void cancelTasks(CancelTasksRequest request, ActionListener listener);
    +    void cancelTasks(CancelTasksRequest request, ActionListener listener);
     
         /**
          * Cancel active tasks
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/FilterClient.java b/server/src/main/java/org/elasticsearch/client/internal/FilterClient.java
    index 53a8e2e189244..7feafe69fbcc2 100644
    --- a/server/src/main/java/org/elasticsearch/client/internal/FilterClient.java
    +++ b/server/src/main/java/org/elasticsearch/client/internal/FilterClient.java
    @@ -62,7 +62,7 @@ protected Client in() {
         }
     
         @Override
    -    public Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    +    public RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
             return in.getRemoteClusterClient(clusterAlias, responseExecutor);
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java
    index 9ba26b95244ab..d931302740f19 100644
    --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java
    +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java
    @@ -19,7 +19,6 @@
     import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
     import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
     import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
    -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
     import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
     import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder;
     import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
    @@ -30,10 +29,8 @@
     import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
     import org.elasticsearch.action.admin.indices.flush.FlushRequest;
     import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder;
    -import org.elasticsearch.action.admin.indices.flush.FlushResponse;
     import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
     import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder;
    -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
     import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
     import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder;
     import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
    @@ -56,7 +53,6 @@
     import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
     import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
     import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
    -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
     import org.elasticsearch.action.admin.indices.resolve.ResolveIndexAction;
     import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
     import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder;
    @@ -71,7 +67,6 @@
     import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder;
     import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
     import org.elasticsearch.action.admin.indices.shrink.ResizeRequestBuilder;
    -import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
     import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
     import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
     import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
    @@ -85,6 +80,7 @@
     import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
     import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder;
     import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
    +import org.elasticsearch.action.support.broadcast.BroadcastResponse;
     import org.elasticsearch.action.support.master.AcknowledgedResponse;
     import org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock;
     import org.elasticsearch.core.Nullable;
    @@ -261,7 +257,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request The refresh request
          * @return The result future
          */
    -    ActionFuture refresh(RefreshRequest request);
    +    ActionFuture refresh(RefreshRequest request);
     
         /**
          * Explicitly refresh one or more indices (making the content indexed since the last refresh searchable).
    @@ -269,7 +265,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request  The refresh request
          * @param listener A listener to be notified with a result
          */
    -    void refresh(RefreshRequest request, ActionListener listener);
    +    void refresh(RefreshRequest request, ActionListener listener);
     
         /**
          * Explicitly refresh one or more indices (making the content indexed since the last refresh searchable).
    @@ -282,7 +278,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request The flush request
          * @return A result future
          */
    -    ActionFuture flush(FlushRequest request);
    +    ActionFuture flush(FlushRequest request);
     
         /**
          * Explicitly flush one or more indices (releasing memory from the node).
    @@ -290,7 +286,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request  The flush request
          * @param listener A listener to be notified with a result
          */
    -    void flush(FlushRequest request, ActionListener listener);
    +    void flush(FlushRequest request, ActionListener listener);
     
         /**
          * Explicitly flush one or more indices (releasing memory from the node).
    @@ -303,7 +299,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request The optimize request
          * @return A result future
          */
    -    ActionFuture forceMerge(ForceMergeRequest request);
    +    ActionFuture forceMerge(ForceMergeRequest request);
     
         /**
          * Explicitly force merge one or more indices into a the number of segments.
    @@ -311,7 +307,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request  The force merge request
          * @param listener A listener to be notified with a result
          */
    -    void forceMerge(ForceMergeRequest request, ActionListener listener);
    +    void forceMerge(ForceMergeRequest request, ActionListener listener);
     
         /**
          * Explicitly force merge one or more indices into a the number of segments.
    @@ -436,7 +432,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request The clear indices cache request
          * @return The result future
          */
    -    ActionFuture clearCache(ClearIndicesCacheRequest request);
    +    ActionFuture clearCache(ClearIndicesCacheRequest request);
     
         /**
          * Clear indices cache.
    @@ -444,7 +440,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
          * @param request  The clear indices cache request
          * @param listener A listener to be notified with a result
          */
    -    void clearCache(ClearIndicesCacheRequest request, ActionListener listener);
    +    void clearCache(ClearIndicesCacheRequest request, ActionListener listener);
     
         /**
          * Clear indices cache.
    @@ -591,7 +587,7 @@ public interface IndicesAdminClient extends ElasticsearchClient {
         /**
          * Shrinks an index using an explicit request allowing to specify the settings, mappings and aliases of the target index of the index.
          */
    -    void resizeIndex(ResizeRequest request, ActionListener listener);
    +    void resizeIndex(ResizeRequest request, ActionListener listener);
     
         /**
          * Swaps the index pointed to by an alias given all provided conditions are satisfied
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java
    index e6393393916b1..c5bcd5e94a4fb 100644
    --- a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java
    +++ b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java
    @@ -12,9 +12,11 @@
     import org.elasticsearch.action.ActionRequest;
     import org.elasticsearch.action.ActionResponse;
     import org.elasticsearch.action.ActionType;
    +import org.elasticsearch.action.RemoteClusterActionType;
     import org.elasticsearch.cluster.node.DiscoveryNode;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.tasks.TaskId;
    +import org.elasticsearch.transport.TransportResponse;
     
     import java.util.concurrent.Executor;
     
    @@ -62,8 +64,18 @@ protected  void
         }
     
         @Override
    -    public ParentTaskAssigningClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    -        Client remoteClient = super.getRemoteClusterClient(clusterAlias, responseExecutor);
    -        return new ParentTaskAssigningClient(remoteClient, parentTask);
    +    public RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    +        final var delegate = super.getRemoteClusterClient(clusterAlias, responseExecutor);
    +        return new RemoteClusterClient() {
    +            @Override
    +            public  void execute(
    +                RemoteClusterActionType action,
    +                Request request,
    +                ActionListener listener
    +            ) {
    +                request.setParentTask(parentTask);
    +                delegate.execute(action, request, listener);
    +            }
    +        };
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java b/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java
    new file mode 100644
    index 0000000000000..337286533f2ab
    --- /dev/null
    +++ b/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java
    @@ -0,0 +1,35 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.client.internal;
    +
    +import org.elasticsearch.action.ActionListener;
    +import org.elasticsearch.action.ActionRequest;
    +import org.elasticsearch.action.ActionType;
    +import org.elasticsearch.action.RemoteClusterActionType;
    +import org.elasticsearch.transport.TransportResponse;
    +
    +/**
    + * A client which can execute requests on a specific remote cluster.
    + */
    +public interface RemoteClusterClient {
    +    /**
    +     * Executes an action, denoted by an {@link ActionType}, on the remote cluster.
    +     *
    +     * @param action           The action type to execute.
    +     * @param request          The action request.
    +     * @param listener         A listener for the response
    +     * @param         The request type.
    +     * @param        the response type.
    +     */
    +     void execute(
    +        RemoteClusterActionType action,
    +        Request request,
    +        ActionListener listener
    +    );
    +}
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
    index f75997d92b678..cbfc325aec497 100644
    --- a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
    +++ b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
    @@ -14,9 +14,9 @@
     import org.elasticsearch.action.ActionType;
     import org.elasticsearch.action.support.TransportAction;
     import org.elasticsearch.client.internal.Client;
    +import org.elasticsearch.client.internal.RemoteClusterClient;
     import org.elasticsearch.client.internal.support.AbstractClient;
     import org.elasticsearch.cluster.node.DiscoveryNode;
    -import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.tasks.TaskCancelledException;
    @@ -46,7 +46,6 @@ public class NodeClient extends AbstractClient {
         private Supplier localNodeId;
         private Transport.Connection localConnection;
         private RemoteClusterService remoteClusterService;
    -    private NamedWriteableRegistry namedWriteableRegistry;
     
         public NodeClient(Settings settings, ThreadPool threadPool) {
             super(settings, threadPool);
    @@ -57,15 +56,13 @@ public void initialize(
             TaskManager taskManager,
             Supplier localNodeId,
             Transport.Connection localConnection,
    -        RemoteClusterService remoteClusterService,
    -        NamedWriteableRegistry namedWriteableRegistry
    +        RemoteClusterService remoteClusterService
         ) {
             this.actions = actions;
             this.taskManager = taskManager;
             this.localNodeId = localNodeId;
             this.localConnection = localConnection;
             this.remoteClusterService = remoteClusterService;
    -        this.namedWriteableRegistry = namedWriteableRegistry;
         }
     
         /**
    @@ -140,12 +137,7 @@ private  Transpo
         }
     
         @Override
    -    public Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    -        return remoteClusterService.getRemoteClusterClient(threadPool(), clusterAlias, responseExecutor, true);
    +    public RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    +        return remoteClusterService.getRemoteClusterClient(clusterAlias, responseExecutor, true);
         }
    -
    -    public NamedWriteableRegistry getNamedWriteableRegistry() {
    -        return namedWriteableRegistry;
    -    }
    -
     }
    diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
    index 12f3dec804809..c6d9c3a8f3563 100644
    --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
    +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
    @@ -35,7 +35,6 @@
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder;
    -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
     import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction;
     import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest;
     import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequestBuilder;
    @@ -129,7 +128,6 @@
     import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
     import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
     import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
    -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
     import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
     import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder;
     import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
    @@ -144,11 +142,9 @@
     import org.elasticsearch.action.admin.indices.flush.FlushAction;
     import org.elasticsearch.action.admin.indices.flush.FlushRequest;
     import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder;
    -import org.elasticsearch.action.admin.indices.flush.FlushResponse;
     import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
     import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
     import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder;
    -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
     import org.elasticsearch.action.admin.indices.get.GetIndexAction;
     import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
     import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder;
    @@ -179,7 +175,6 @@
     import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
     import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
     import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
    -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
     import org.elasticsearch.action.admin.indices.resolve.ResolveIndexAction;
     import org.elasticsearch.action.admin.indices.rollover.RolloverAction;
     import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
    @@ -199,7 +194,6 @@
     import org.elasticsearch.action.admin.indices.shrink.ResizeAction;
     import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
     import org.elasticsearch.action.admin.indices.shrink.ResizeRequestBuilder;
    -import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
     import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
     import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
     import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
    @@ -275,6 +269,7 @@
     import org.elasticsearch.action.search.TransportSearchAction;
     import org.elasticsearch.action.search.TransportSearchScrollAction;
     import org.elasticsearch.action.support.PlainActionFuture;
    +import org.elasticsearch.action.support.broadcast.BroadcastResponse;
     import org.elasticsearch.action.support.master.AcknowledgedResponse;
     import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
     import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
    @@ -806,12 +801,12 @@ public GetTaskRequestBuilder prepareGetTask(TaskId taskId) {
             }
     
             @Override
    -        public ActionFuture cancelTasks(CancelTasksRequest request) {
    +        public ActionFuture cancelTasks(CancelTasksRequest request) {
                 return execute(CancelTasksAction.INSTANCE, request);
             }
     
             @Override
    -        public void cancelTasks(CancelTasksRequest request, ActionListener listener) {
    +        public void cancelTasks(CancelTasksRequest request, ActionListener listener) {
                 execute(CancelTasksAction.INSTANCE, request, listener);
             }
     
    @@ -1118,7 +1113,7 @@ public GetAliasesRequestBuilder prepareGetAliases(String... aliases) {
             }
     
             @Override
    -        public ActionFuture clearCache(final ClearIndicesCacheRequest request) {
    +        public ActionFuture clearCache(final ClearIndicesCacheRequest request) {
                 return execute(ClearIndicesCacheAction.INSTANCE, request);
             }
     
    @@ -1138,7 +1133,7 @@ public GetIndexRequestBuilder prepareGetIndex() {
             }
     
             @Override
    -        public void clearCache(final ClearIndicesCacheRequest request, final ActionListener listener) {
    +        public void clearCache(final ClearIndicesCacheRequest request, final ActionListener listener) {
                 execute(ClearIndicesCacheAction.INSTANCE, request, listener);
             }
     
    @@ -1218,12 +1213,12 @@ public OpenIndexRequestBuilder prepareOpen(String... indices) {
             }
     
             @Override
    -        public ActionFuture flush(final FlushRequest request) {
    +        public ActionFuture flush(final FlushRequest request) {
                 return execute(FlushAction.INSTANCE, request);
             }
     
             @Override
    -        public void flush(final FlushRequest request, final ActionListener listener) {
    +        public void flush(final FlushRequest request, final ActionListener listener) {
                 execute(FlushAction.INSTANCE, request, listener);
             }
     
    @@ -1278,12 +1273,12 @@ public PutMappingRequestBuilder preparePutMapping(String... indices) {
             }
     
             @Override
    -        public ActionFuture forceMerge(final ForceMergeRequest request) {
    +        public ActionFuture forceMerge(final ForceMergeRequest request) {
                 return execute(ForceMergeAction.INSTANCE, request);
             }
     
             @Override
    -        public void forceMerge(final ForceMergeRequest request, final ActionListener listener) {
    +        public void forceMerge(final ForceMergeRequest request, final ActionListener listener) {
                 execute(ForceMergeAction.INSTANCE, request, listener);
             }
     
    @@ -1293,12 +1288,12 @@ public ForceMergeRequestBuilder prepareForceMerge(String... indices) {
             }
     
             @Override
    -        public ActionFuture refresh(final RefreshRequest request) {
    +        public ActionFuture refresh(final RefreshRequest request) {
                 return execute(RefreshAction.INSTANCE, request);
             }
     
             @Override
    -        public void refresh(final RefreshRequest request, final ActionListener listener) {
    +        public void refresh(final RefreshRequest request, final ActionListener listener) {
                 execute(RefreshAction.INSTANCE, request, listener);
             }
     
    @@ -1453,7 +1448,7 @@ public ResizeRequestBuilder prepareResizeIndex(String sourceIndex, String target
             }
     
             @Override
    -        public void resizeIndex(ResizeRequest request, ActionListener listener) {
    +        public void resizeIndex(ResizeRequest request, ActionListener listener) {
                 execute(ResizeAction.INSTANCE, request, listener);
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java
    index c2b61e496e9c9..0f83e6f2d8e19 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java
    @@ -144,7 +144,7 @@ public Iterator toXContentChunked(ToXContent.Params params
             return Iterators.concat(startObject("nodes"), Iterators.map(leastAvailableSpaceUsage.entrySet().iterator(), c -> (builder, p) -> {
                 builder.startObject(c.getKey());
                 { // node
    -                builder.field("node_name", c.getValue().getNodeName());
    +                builder.field("node_name", c.getValue().nodeName());
                     builder.startObject("least_available");
                     {
                         c.getValue().toShortXContent(builder);
    diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java b/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java
    index 7dbd4f864bdb3..593bb251d3f5e 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java
    @@ -122,14 +122,14 @@ private void modifyDiskUsage(String nodeId, long freeDelta) {
             if (diskUsage == null) {
                 return;
             }
    -        var path = diskUsage.getPath();
    +        var path = diskUsage.path();
             updateDiskUsage(leastAvailableSpaceUsage, nodeId, path, freeDelta);
             updateDiskUsage(mostAvailableSpaceUsage, nodeId, path, freeDelta);
         }
     
         private void updateDiskUsage(Map availableSpaceUsage, String nodeId, String path, long freeDelta) {
             var usage = availableSpaceUsage.get(nodeId);
    -        if (usage != null && Objects.equals(usage.getPath(), path)) {
    +        if (usage != null && Objects.equals(usage.path(), path)) {
                 // ensure new value is within bounds
                 availableSpaceUsage.put(nodeId, updateWithFreeBytes(usage, freeDelta));
             }
    @@ -139,7 +139,7 @@ private static DiskUsage updateWithFreeBytes(DiskUsage usage, long delta) {
             // free bytes might go out of range in case when multiple data path are used
             // we might not know exact disk used to allocate a shard and conservatively update
             // most used disk on a target node and least used disk on a source node
    -        var freeBytes = withinRange(0, usage.getTotalBytes(), usage.freeBytes() + delta);
    +        var freeBytes = withinRange(0, usage.totalBytes(), usage.freeBytes() + delta);
             return usage.copyWithFreeBytes(freeBytes);
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
    index 03b32ea0b3bfb..74deb90ee411a 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
    @@ -332,12 +332,21 @@ private void logUnexpectedException(Exception exception, String format, Object..
     
         public interface Listener {
     
    -        /** called when a new state is observed */
    +        /**
    +         * Called when a new state is observed. Implementations should avoid doing heavy operations on the calling thread and fork to
    +         * a threadpool if necessary to avoid blocking the {@link ClusterApplierService}. Note that operations such as sending a new
    +         * request (e.g. via {@link org.elasticsearch.client.internal.Client} or {@link org.elasticsearch.transport.TransportService})
    +         * is cheap enough to be performed without forking.
    +         */
             void onNewClusterState(ClusterState state);
     
             /** called when the cluster service is closed */
             void onClusterServiceClose();
     
    +        /**
    +         * Called when the {@link ClusterStateObserver} times out while waiting for a new matching cluster state if a timeout is
    +         * used when creating the observer. Upon timeout, {@code onTimeout} is called on the GENERIC threadpool.
    +         */
             void onTimeout(TimeValue timeout);
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/DiskUsage.java b/server/src/main/java/org/elasticsearch/cluster/DiskUsage.java
    index 1d606737edf3a..3bc6f889e5778 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/DiskUsage.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/DiskUsage.java
    @@ -53,33 +53,21 @@ private static double truncatePercent(double pct) {
         XContentBuilder toShortXContent(XContentBuilder builder) throws IOException {
             builder.field("path", this.path);
             builder.humanReadableField("total_bytes", "total", ByteSizeValue.ofBytes(this.totalBytes));
    -        builder.humanReadableField("used_bytes", "used", ByteSizeValue.ofBytes(this.getUsedBytes()));
    +        builder.humanReadableField("used_bytes", "used", ByteSizeValue.ofBytes(this.usedBytes()));
             builder.humanReadableField("free_bytes", "free", ByteSizeValue.ofBytes(this.freeBytes));
    -        builder.field("free_disk_percent", truncatePercent(this.getFreeDiskAsPercentage()));
    -        builder.field("used_disk_percent", truncatePercent(this.getUsedDiskAsPercentage()));
    +        builder.field("free_disk_percent", truncatePercent(this.freeDiskAsPercentage()));
    +        builder.field("used_disk_percent", truncatePercent(this.usedDiskAsPercentage()));
             return builder;
         }
     
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.field("node_id", this.nodeId);
             builder.field("node_name", this.nodeName);
    -        builder = toShortXContent(builder);
    +        toShortXContent(builder);
             return builder;
         }
     
    -    public String getNodeId() {
    -        return nodeId;
    -    }
    -
    -    public String getNodeName() {
    -        return nodeName;
    -    }
    -
    -    public String getPath() {
    -        return path;
    -    }
    -
    -    public double getFreeDiskAsPercentage() {
    +    public double freeDiskAsPercentage() {
             // We return 100.0% in order to fail "open", in that if we have invalid
             // numbers for the total bytes, it's as if we don't know disk usage.
             if (totalBytes == 0) {
    @@ -88,20 +76,12 @@ public double getFreeDiskAsPercentage() {
             return 100.0 * freeBytes / totalBytes;
         }
     
    -    public double getUsedDiskAsPercentage() {
    -        return 100.0 - getFreeDiskAsPercentage();
    -    }
    -
    -    public long getFreeBytes() {
    -        return freeBytes;
    -    }
    -
    -    public long getTotalBytes() {
    -        return totalBytes;
    +    public double usedDiskAsPercentage() {
    +        return 100.0 - freeDiskAsPercentage();
         }
     
    -    public long getUsedBytes() {
    -        return getTotalBytes() - getFreeBytes();
    +    public long usedBytes() {
    +        return totalBytes - freeBytes;
         }
     
         @Override
    @@ -113,9 +93,9 @@ public String toString() {
                 + "]["
                 + path
                 + "] free: "
    -            + ByteSizeValue.ofBytes(getFreeBytes())
    +            + ByteSizeValue.ofBytes(this.freeBytes())
                 + "["
    -            + Strings.format1Decimals(getFreeDiskAsPercentage(), "%")
    +            + Strings.format1Decimals(freeDiskAsPercentage(), "%")
                 + "]";
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java
    index 5134f153a7fbb..c2cd403836593 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java
    @@ -9,6 +9,7 @@
     
     import org.apache.logging.log4j.LogManager;
     import org.apache.logging.log4j.Logger;
    +import org.elasticsearch.TransportVersions;
     import org.elasticsearch.cluster.ClusterState;
     import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration;
     import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection;
    @@ -140,6 +141,7 @@ public record ClusterFormationState(
             VotingConfiguration lastCommittedConfiguration,
             List resolvedAddresses,
             List foundPeers,
    +        Set mastersOfPeers,
             long currentTerm,
             boolean hasDiscoveredQuorum,
             StatusInfo statusInfo,
    @@ -151,6 +153,7 @@ public ClusterFormationState(
                 ClusterState clusterState,
                 List resolvedAddresses,
                 List foundPeers,
    +            Set mastersOfPeers,
                 long currentTerm,
                 ElectionStrategy electionStrategy,
                 StatusInfo statusInfo,
    @@ -166,6 +169,7 @@ public ClusterFormationState(
                     clusterState.getLastCommittedConfiguration(),
                     resolvedAddresses,
                     foundPeers,
    +                mastersOfPeers,
                     currentTerm,
                     calculateHasDiscoveredQuorum(
                         foundPeers,
    @@ -216,6 +220,9 @@ public ClusterFormationState(StreamInput in) throws IOException {
                     new VotingConfiguration(in),
                     in.readCollectionAsImmutableList(TransportAddress::new),
                     in.readCollectionAsImmutableList(DiscoveryNode::new),
    +                in.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS)
    +                    ? in.readCollectionAsImmutableSet(DiscoveryNode::new)
    +                    : Set.of(),
                     in.readLong(),
                     in.readBoolean(),
                     new StatusInfo(in),
    @@ -250,12 +257,19 @@ private String getCoordinatorDescription() {
                     acceptedTerm
                 );
     
    -            final StringBuilder foundPeersDescription = new StringBuilder();
    +            final StringBuilder foundPeersDescription = new StringBuilder("[");
                 DiscoveryNodes.addCommaSeparatedNodesWithoutAttributes(foundPeers.iterator(), foundPeersDescription);
    +            if (mastersOfPeers.isEmpty()) {
    +                foundPeersDescription.append(']');
    +            } else {
    +                foundPeersDescription.append("] who claim current master to be [");
    +                DiscoveryNodes.addCommaSeparatedNodesWithoutAttributes(mastersOfPeers.iterator(), foundPeersDescription);
    +                foundPeersDescription.append(']');
    +            }
     
                 final String discoveryStateIgnoringQuorum = String.format(
                     Locale.ROOT,
    -                "have discovered [%s]; %s",
    +                "have discovered %s; %s",
                     foundPeersDescription,
                     discoveryWillContinueDescription
                 );
    @@ -291,7 +305,7 @@ private String getCoordinatorDescription() {
                 if (lastCommittedConfiguration.equals(VotingConfiguration.MUST_JOIN_ELECTED_MASTER)) {
                     return String.format(
                         Locale.ROOT,
    -                    "master not discovered yet and this node was detached from its previous cluster, have discovered [%s]; %s",
    +                    "master not discovered yet and this node was detached from its previous cluster, have discovered %s; %s",
                         foundPeersDescription,
                         discoveryWillContinueDescription
                     );
    @@ -310,7 +324,7 @@ private String getCoordinatorDescription() {
     
                 return String.format(
                     Locale.ROOT,
    -                "master not discovered or elected yet, an election requires %s, %s [%s]; %s",
    +                "master not discovered or elected yet, an election requires %s, %s %s; %s",
                     quorumDescription,
                     haveDiscoveredQuorum,
                     foundPeersDescription,
    @@ -388,6 +402,9 @@ public void writeTo(StreamOutput out) throws IOException {
                 lastCommittedConfiguration.writeTo(out);
                 out.writeCollection(resolvedAddresses);
                 out.writeCollection(foundPeers);
    +            if (out.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS)) {
    +                out.writeCollection(mastersOfPeers);
    +            }
                 out.writeLong(currentTerm);
                 out.writeBoolean(hasDiscoveredQuorum);
                 statusInfo.writeTo(out);
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java
    index 229c34ecc1a14..d16f4ee27e8a1 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java
    @@ -15,7 +15,6 @@
     import org.elasticsearch.action.ActionListenerResponseHandler;
     import org.elasticsearch.action.ActionRequest;
     import org.elasticsearch.action.ActionResponse;
    -import org.elasticsearch.action.ActionType;
     import org.elasticsearch.action.admin.cluster.coordination.ClusterFormationInfoAction;
     import org.elasticsearch.action.admin.cluster.coordination.CoordinationDiagnosticsAction;
     import org.elasticsearch.cluster.ClusterChangedEvent;
    @@ -944,7 +943,8 @@ private Scheduler.Cancellable fetchClusterFormationInfo(
             return sendTransportRequest(
                 node,
                 responseConsumer,
    -            ClusterFormationInfoAction.INSTANCE,
    +            ClusterFormationInfoAction.NAME,
    +            ClusterFormationInfoAction.Response::new,
                 new ClusterFormationInfoAction.Request(),
                 (response, e) -> {
                     assert response != null || e != null : "a response or an exception must be provided";
    @@ -1062,7 +1062,8 @@ private Scheduler.Cancellable fetchCoordinationDiagnostics(
             return sendTransportRequest(
                 masterEligibleNode,
                 responseConsumer,
    -            CoordinationDiagnosticsAction.INSTANCE,
    +            CoordinationDiagnosticsAction.NAME,
    +            CoordinationDiagnosticsAction.Response::new,
                 new CoordinationDiagnosticsAction.Request(true),
                 (response, e) -> {
                     assert response != null || e != null : "a response or an exception must be provided";
    @@ -1083,7 +1084,8 @@ private Scheduler.Cancellable fetchCoordinationDiagnostics(
          * @param masterEligibleNode        The master eligible node to be queried, or null if we do not yet know of a master eligible node.
          *                                  If this is null, the responseConsumer will be given a null response
          * @param responseConsumer          The consumer of the transformed response
    -     * @param transportActionType       The ActionType for the transport action
    +     * @param actionName                The name of the transport action
    +     * @param responseReader            How to deserialize the transport response
          * @param transportActionRequest    The ActionRequest to be sent
          * @param responseTransformationFunction A function that converts a response or exception to the response type expected by the
          *                                       responseConsumer
    @@ -1092,7 +1094,8 @@ private Scheduler.Cancellable fetchCoordinationDiagnostics(
         private  Scheduler.Cancellable sendTransportRequest(
             @Nullable DiscoveryNode masterEligibleNode,
             Consumer responseConsumer,
    -        ActionType transportActionType,
    +        String actionName,
    +        Writeable.Reader responseReader,
             ActionRequest transportActionRequest,
             BiFunction responseTransformationFunction
         ) {
    @@ -1109,12 +1112,12 @@ private  Scheduler.Cancellable sendTransportRequest
                     final TimeValue transportTimeout = TimeValue.timeValueSeconds(10);
                     transportService.sendRequest(
                         masterEligibleNode,
    -                    transportActionType.name(),
    +                    actionName,
                         transportActionRequest,
                         TransportRequestOptions.timeout(transportTimeout),
                         new ActionListenerResponseHandler<>(
                             ActionListener.runBefore(fetchRemoteResultListener, () -> Releasables.close(releasable)),
    -                        transportActionType.getResponseReader(),
    +                        responseReader,
                             clusterCoordinationExecutor
                         )
                     );
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
    index 3da890b37ade8..927ca1152a658 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
    @@ -334,6 +334,7 @@ public ClusterFormationState getClusterFormationState() {
                 getLastAcceptedState(), // doesn't care about blocks or the current master node so no need for getStateForMasterService
                 peerFinder.getLastResolvedAddresses(),
                 Stream.concat(Stream.of(getLocalNode()), StreamSupport.stream(peerFinder.getFoundPeers().spliterator(), false)).toList(),
    +            peerFinder.getMastersOfPeers(),
                 getCurrentTerm(),
                 electionStrategy,
                 nodeHealthService.getHealth(),
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java
    index feb0543aad625..cdd21efce3ed5 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java
    @@ -27,7 +27,6 @@
     import org.elasticsearch.core.TimeValue;
     import org.elasticsearch.monitor.NodeHealthService;
     import org.elasticsearch.monitor.StatusInfo;
    -import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.threadpool.ThreadPool.Names;
     import org.elasticsearch.transport.ConnectTransportException;
     import org.elasticsearch.transport.ReceiveTimeoutTransportException;
    @@ -307,7 +306,7 @@ private void handleWakeUp() {
                     new TransportResponseHandler.Empty() {
     
                         @Override
    -                    public Executor executor(ThreadPool threadPool) {
    +                    public Executor executor() {
                             return TransportResponseHandler.TRANSPORT_WORKER;
                         }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java
    index d11d8ade2a036..815f531e50e3b 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java
    @@ -317,7 +317,7 @@ public void onResponse(Void unused) {
                                         TransportRequestOptions.of(null, TransportRequestOptions.Type.PING),
                                         new TransportResponseHandler.Empty() {
                                             @Override
    -                                        public Executor executor(ThreadPool threadPool) {
    +                                        public Executor executor() {
                                                 return TransportResponseHandler.TRANSPORT_WORKER;
                                             }
     
    @@ -379,7 +379,7 @@ void sendStartJoinRequest(final StartJoinRequest startJoinRequest, final Discove
                 : "sending start-join request for master-ineligible " + startJoinRequest.getMasterCandidateNode();
             transportService.sendRequest(destination, START_JOIN_ACTION_NAME, startJoinRequest, new TransportResponseHandler.Empty() {
                 @Override
    -            public Executor executor(ThreadPool threadPool) {
    +            public Executor executor() {
                     return TransportResponseHandler.TRANSPORT_WORKER;
                 }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java
    index 9fcae5bcf67f8..d7c9dd1feb2b5 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java
    @@ -13,7 +13,6 @@
     import org.elasticsearch.ExceptionsHelper;
     import org.elasticsearch.cluster.node.DiscoveryNode;
     import org.elasticsearch.cluster.node.DiscoveryNodes;
    -import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.common.settings.Setting;
    @@ -26,7 +25,6 @@
     import org.elasticsearch.core.TimeValue;
     import org.elasticsearch.monitor.NodeHealthService;
     import org.elasticsearch.monitor.StatusInfo;
    -import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.threadpool.ThreadPool.Names;
     import org.elasticsearch.transport.ConnectTransportException;
     import org.elasticsearch.transport.NodeDisconnectedException;
    @@ -164,7 +162,7 @@ void updateLeader(@Nullable final DiscoveryNode leader) {
          */
         void setCurrentNodes(DiscoveryNodes discoveryNodes) {
             // Sorting the nodes for deterministic logging until https://github.com/elastic/elasticsearch/issues/94946 is fixed
    -        logger.trace(() -> Strings.format("setCurrentNodes: {}", discoveryNodes.mastersFirstStream().toList()));
    +        logger.trace(() -> format("setCurrentNodes: %s", discoveryNodes.mastersFirstStream().toList()));
             this.discoveryNodes = discoveryNodes;
         }
     
    @@ -239,7 +237,7 @@ void handleWakeUp() {
                     TransportRequestOptions.of(leaderCheckTimeout, Type.PING),
                     new TransportResponseHandler.Empty() {
                         @Override
    -                    public Executor executor(ThreadPool threadPool) {
    +                    public Executor executor() {
                             return TransportResponseHandler.TRANSPORT_WORKER;
                         }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java
    index 56289ab348a3a..80b4b455912e7 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java
    @@ -10,6 +10,7 @@
     
     import org.elasticsearch.cluster.node.DiscoveryNode;
     import org.elasticsearch.cluster.service.ClusterService;
    +import org.elasticsearch.common.ReferenceDocs;
     import org.elasticsearch.core.Nullable;
     import org.elasticsearch.health.Diagnosis;
     import org.elasticsearch.health.HealthIndicatorDetails;
    @@ -39,14 +40,36 @@
     public class StableMasterHealthIndicatorService implements HealthIndicatorService {
     
         public static final String NAME = "master_is_stable";
    -    public static final String GET_HELP_GUIDE = "https://ela.st/getting-help";
    +
    +    public static final Diagnosis TROUBLESHOOT_DISCOVERY = new Diagnosis(
    +        new Diagnosis.Definition(
    +            NAME,
    +            "troubleshoot_discovery",
    +            "The Elasticsearch cluster does not have a stable master node.",
    +            "See discovery troubleshooting guidance at " + ReferenceDocs.DISCOVERY_TROUBLESHOOTING,
    +            ReferenceDocs.DISCOVERY_TROUBLESHOOTING.toString()
    +        ),
    +        null
    +    );
    +
    +    public static final Diagnosis TROUBLESHOOT_UNSTABLE_CLUSTER = new Diagnosis(
    +        new Diagnosis.Definition(
    +            NAME,
    +            "troubleshoot_unstable_cluster",
    +            "The Elasticsearch cluster does not have a stable master node.",
    +            "See unstable cluster troubleshooting guidance at " + ReferenceDocs.UNSTABLE_CLUSTER_TROUBLESHOOTING,
    +            ReferenceDocs.UNSTABLE_CLUSTER_TROUBLESHOOTING.toString()
    +        ),
    +        null
    +    );
    +
         public static final Diagnosis CONTACT_SUPPORT = new Diagnosis(
             new Diagnosis.Definition(
                 NAME,
                 "contact_support",
                 "The Elasticsearch cluster does not have a stable master node.",
    -            "Get help at " + GET_HELP_GUIDE,
    -            GET_HELP_GUIDE
    +            "Get help at " + ReferenceDocs.CONTACT_SUPPORT,
    +            ReferenceDocs.CONTACT_SUPPORT.toString()
             ),
             null
         );
    @@ -67,12 +90,13 @@ public class StableMasterHealthIndicatorService implements HealthIndicatorServic
         public static final String BACKUP_DISABLED_IMPACT_ID = "backup_disabled";
     
         // Impacts of having an unstable master:
    -    private static final String UNSTABLE_MASTER_INGEST_IMPACT = "The cluster cannot create, delete, or rebalance indices, and cannot "
    -        + "insert or update documents.";
    -    private static final String UNSTABLE_MASTER_DEPLOYMENT_MANAGEMENT_IMPACT = "Scheduled tasks such as Watcher, Index Lifecycle "
    -        + "Management, and Snapshot Lifecycle Management will not work. The _cat APIs will not work.";
    -    private static final String UNSTABLE_MASTER_BACKUP_IMPACT = "Snapshot and restore will not work, your data will not be backed up. "
    -        + "Searchable snapshots cannot be mounted.";
    +    private static final String UNSTABLE_MASTER_INGEST_IMPACT = """
    +        The cluster cannot create, delete, or rebalance indices, and cannot insert or update documents.""";
    +    private static final String UNSTABLE_MASTER_DEPLOYMENT_MANAGEMENT_IMPACT = """
    +        Scheduled tasks such as Watcher, Index Lifecycle Management, and Snapshot Lifecycle Management will not work. \
    +        The _cat APIs will not work.""";
    +    private static final String UNSTABLE_MASTER_BACKUP_IMPACT = """
    +        Snapshot and restore will not work. Your data will not be backed up, and searchable snapshots cannot be mounted.""";
     
         /**
          * This is the list of the impacts to be reported when the master node is determined to be unstable.
    @@ -128,7 +152,7 @@ HealthIndicatorResult getHealthIndicatorResult(
             HealthStatus status = HealthStatus.fromCoordinationDiagnosticsStatus(coordinationDiagnosticsResult.status());
             HealthIndicatorDetails details = getDetails(coordinationDiagnosticsResult.details(), explain);
              Collection<HealthIndicatorImpact> impacts = status.indicatesHealthProblem() ? UNSTABLE_MASTER_IMPACTS : List.of();
     -        List<Diagnosis> diagnosis = status.indicatesHealthProblem() ? getContactSupportUserActions(explain) : List.of();
     +        List<Diagnosis> diagnosis = status.indicatesHealthProblem() ? getUnstableMasterDiagnoses(explain) : List.of();
             return createIndicator(status, coordinationDiagnosticsResult.summary(), details, impacts, diagnosis);
         }
     
    @@ -215,13 +239,16 @@ private String getNameForNodeId(String nodeId) {
         }
     
         /**
    -     * This method returns the only user action that is relevant when the master is unstable -- contact support.
    -     * @param explain If true, the returned list includes a UserAction to contact support, otherwise an empty list
    -     * @return a single UserAction instructing users to contact support.
    +     * This method returns the relevant user actions when the master is unstable, linking to some troubleshooting docs and suggesting to
    +     * contact support.
    +     *
    +     * @param explain If true, the returned list includes UserActions linking to troubleshooting docs and another to contact support,
    +     *                otherwise an empty list.
    +     * @return the relevant user actions when the master is unstable.
          */
     -    private static List<Diagnosis> getContactSupportUserActions(boolean explain) {
     +    private List<Diagnosis> getUnstableMasterDiagnoses(boolean explain) {
             if (explain) {
    -            return List.of(CONTACT_SUPPORT);
    +            return List.of(TROUBLESHOOT_DISCOVERY, TROUBLESHOOT_UNSTABLE_CLUSTER, CONTACT_SUPPORT);
             } else {
                 return List.of();
             }
    diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollector.java b/server/src/main/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollector.java
    index 7bc3514206ffe..bf33f97f2ad42 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollector.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollector.java
    @@ -18,7 +18,6 @@
     import org.elasticsearch.core.Tuple;
     import org.elasticsearch.monitor.NodeHealthService;
     import org.elasticsearch.monitor.StatusInfo;
    -import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.threadpool.ThreadPool.Names;
     import org.elasticsearch.transport.TransportException;
     import org.elasticsearch.transport.TransportResponseHandler;
    @@ -159,7 +158,7 @@ public void handleException(TransportException exp) {
                             }
     
                             @Override
    -                        public Executor executor(ThreadPool threadPool) {
    +                        public Executor executor() {
                                 return clusterCoordinationExecutor;
                             }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/desirednodes/DesiredNodesSettingsValidator.java b/server/src/main/java/org/elasticsearch/cluster/desirednodes/DesiredNodesSettingsValidator.java
    deleted file mode 100644
    index a2df7c234680a..0000000000000
    --- a/server/src/main/java/org/elasticsearch/cluster/desirednodes/DesiredNodesSettingsValidator.java
    +++ /dev/null
    @@ -1,74 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.cluster.desirednodes;
    -
    -import org.elasticsearch.Build;
    -import org.elasticsearch.Version;
    -import org.elasticsearch.cluster.metadata.DesiredNode;
    -import org.elasticsearch.core.Nullable;
    -
    -import java.util.ArrayList;
    -import java.util.List;
    -import java.util.Locale;
    -import java.util.function.Consumer;
    -import java.util.stream.Collectors;
    -
    -import static java.lang.String.format;
    -
     -public class DesiredNodesSettingsValidator implements Consumer<List<DesiredNode>> {
    -    private record DesiredNodeValidationError(int position, @Nullable String externalId, RuntimeException exception) {}
    -
    -    @Override
     -    public void accept(List<DesiredNode> nodes) {
     -        final List<DesiredNodeValidationError> validationErrors = new ArrayList<>();
    -        for (int i = 0; i < nodes.size(); i++) {
    -            final DesiredNode node = nodes.get(i);
    -            if (node.version().before(Version.CURRENT)) {
    -                validationErrors.add(
    -                    new DesiredNodeValidationError(
    -                        i,
    -                        node.externalId(),
    -                        new IllegalArgumentException(
    -                            format(
    -                                Locale.ROOT,
    -                                "Illegal node version [%s]. Only [%s] or newer versions are supported",
    -                                node.version(),
    -                                Build.current().version()
    -                            )
    -                        )
    -                    )
    -                );
    -            }
    -        }
    -
    -        if (validationErrors.isEmpty() == false) {
    -            final String nodeIndicesWithFailures = validationErrors.stream()
    -                .map(DesiredNodeValidationError::position)
    -                .map(i -> Integer.toString(i))
    -                .collect(Collectors.joining(","));
    -
    -            final String nodeIdsWithFailures = validationErrors.stream()
    -                .map(DesiredNodeValidationError::externalId)
    -                .collect(Collectors.joining(","));
    -            IllegalArgumentException invalidSettingsException = new IllegalArgumentException(
    -                format(
    -                    Locale.ROOT,
    -                    "Nodes with ids [%s] in positions [%s] contain invalid settings",
    -                    nodeIdsWithFailures,
    -                    nodeIndicesWithFailures
    -                )
    -            );
    -            for (DesiredNodeValidationError exceptionTuple : validationErrors) {
    -                invalidSettingsException.addSuppressed(exceptionTuple.exception);
    -            }
    -            throw invalidSettingsException;
    -        }
    -    }
    -
    -}
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasInfo.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasInfo.java
    new file mode 100644
    index 0000000000000..3c6db4fb39b1b
    --- /dev/null
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasInfo.java
    @@ -0,0 +1,16 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.cluster.metadata;
    +
    +/**
    + * Used as a common interface for AliasMetadata and DataStreamAlias
    + */
    +interface AliasInfo {
    +    String getAlias();
    +}
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetadata.java
    index e813624ef9527..a0f4a929dafdb 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetadata.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetadata.java
    @@ -32,7 +32,7 @@
     
     import static java.util.Collections.emptySet;
     
     -public class AliasMetadata implements SimpleDiffable<AliasMetadata>, ToXContentFragment {
     +public class AliasMetadata implements SimpleDiffable<AliasMetadata>, ToXContentFragment, AliasInfo {
     
         private final String alias;
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java
    index 84db5887b5926..ff31c6fe950d7 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java
    @@ -907,7 +907,7 @@ public DataStream(StreamInput in) throws IOException {
                 in.readBoolean(),
                 in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) ? in.readBoolean() : false,
                 in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) ? in.readOptionalEnum(IndexMode.class) : null,
    -            in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) ? in.readOptionalWriteable(DataStreamLifecycle::new) : null,
    +            in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(DataStreamLifecycle::new) : null,
                 in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? in.readBoolean() : false,
                 in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? readIndices(in) : List.of(),
                 in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED) ? in.readBoolean() : false
    @@ -944,7 +944,7 @@ public void writeTo(StreamOutput out) throws IOException {
             if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
                 out.writeOptionalEnum(indexMode);
             }
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 out.writeOptionalWriteable(lifecycle);
             }
             if (out.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION)) {
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java
    index 4c86a91ee82f2..ba5e662cca5f9 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java
    @@ -37,7 +37,7 @@
     import java.util.function.Predicate;
     import java.util.stream.Collectors;
     
     -public class DataStreamAlias implements SimpleDiffable<DataStreamAlias>, ToXContentFragment {
     +public class DataStreamAlias implements SimpleDiffable<DataStreamAlias>, ToXContentFragment, AliasInfo {
     
         public static final ParseField DATA_STREAMS_FIELD = new ParseField("data_streams");
         public static final ParseField WRITE_DATA_STREAM_FIELD = new ParseField("write_data_stream");
    @@ -191,6 +191,13 @@ public String getName() {
             return name;
         }
     
    +    /**
    +     * Returns the alias name, which is the same value as getName()
    +     */
    +    public String getAlias() {
    +        return getName();
    +    }
    +
         /**
          * Returns the data streams that are referenced
          */
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java
    index 83a5d99c8f348..215ed515748ab 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java
    @@ -48,7 +48,7 @@
     public class DataStreamLifecycle implements SimpleDiffable, ToXContentObject {
     
         // Versions over the wire
    -    public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_500_061;
    +    public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_10_X;
     
         public static final String DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME = "data_streams.lifecycle_only.mode";
     
    @@ -187,7 +187,7 @@ public int hashCode() {
     
         @Override
         public void writeTo(StreamOutput out) throws IOException {
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 out.writeOptionalWriteable(dataRetention);
             }
             if (out.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) {
    @@ -197,7 +197,7 @@ public void writeTo(StreamOutput out) throws IOException {
         }
     
         public DataStreamLifecycle(StreamInput in) throws IOException {
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 dataRetention = in.readOptionalWriteable(Retention::read);
             } else {
                 dataRetention = null;
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java
    index 27beffe56b97a..1ce950cf71f58 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java
    @@ -13,6 +13,7 @@
     import org.elasticsearch.Version;
     import org.elasticsearch.cluster.node.DiscoveryNode;
     import org.elasticsearch.cluster.node.DiscoveryNodeRole;
    +import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.common.io.stream.Writeable;
    @@ -20,6 +21,7 @@
     import org.elasticsearch.common.unit.ByteSizeValue;
     import org.elasticsearch.common.unit.Processors;
     import org.elasticsearch.core.Nullable;
    +import org.elasticsearch.core.UpdateForV9;
     import org.elasticsearch.features.NodeFeature;
     import org.elasticsearch.xcontent.ConstructingObjectParser;
     import org.elasticsearch.xcontent.ObjectParser;
    @@ -35,6 +37,7 @@
     import java.util.Set;
     import java.util.TreeSet;
     import java.util.function.Predicate;
    +import java.util.regex.Pattern;
     
     import static java.lang.String.format;
     import static org.elasticsearch.node.Node.NODE_EXTERNAL_ID_SETTING;
    @@ -45,6 +48,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl
     
         public static final NodeFeature RANGE_FLOAT_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.range_float_processors");
         public static final NodeFeature DOUBLE_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.double_processors");
    +    public static final NodeFeature DESIRED_NODE_VERSION_DEPRECATED = new NodeFeature("desired_node.version_deprecated");
     
         public static final TransportVersion RANGE_FLOAT_PROCESSORS_SUPPORT_TRANSPORT_VERSION = TransportVersions.V_8_3_0;
     
    @@ -53,6 +57,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl
         private static final ParseField PROCESSORS_RANGE_FIELD = new ParseField("processors_range");
         private static final ParseField MEMORY_FIELD = new ParseField("memory");
         private static final ParseField STORAGE_FIELD = new ParseField("storage");
    +    @UpdateForV9 // Remove deprecated field
         private static final ParseField VERSION_FIELD = new ParseField("node_version");
     
          public static final ConstructingObjectParser<DesiredNode, Void> PARSER = new ConstructingObjectParser<>(
    @@ -64,7 +69,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl
                 (ProcessorsRange) args[2],
                 (ByteSizeValue) args[3],
                 (ByteSizeValue) args[4],
    -            (Version) args[5]
    +            (String) args[5]
             )
         );
     
    @@ -99,49 +104,57 @@ static  void configureParser(ConstructingObjectParser parser) {
                 ObjectParser.ValueType.STRING
             );
             parser.declareField(
    -            ConstructingObjectParser.constructorArg(),
    -            (p, c) -> parseVersion(p.text()),
    +            ConstructingObjectParser.optionalConstructorArg(),
    +            (p, c) -> p.text(),
                 VERSION_FIELD,
                 ObjectParser.ValueType.STRING
             );
         }
     
    -    private static Version parseVersion(String version) {
    -        if (version == null || version.isBlank()) {
    -            throw new IllegalArgumentException(VERSION_FIELD.getPreferredName() + " must not be empty");
    -        }
    -        return Version.fromString(version);
    -    }
    -
         private final Settings settings;
         private final Processors processors;
         private final ProcessorsRange processorsRange;
         private final ByteSizeValue memory;
         private final ByteSizeValue storage;
    -    private final Version version;
    +
    +    @UpdateForV9 // Remove deprecated version field
    +    private final String version;
         private final String externalId;
          private final Set<DiscoveryNodeRole> roles;
     
    -    public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage, Version version) {
    +    @Deprecated
    +    public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage, String version) {
             this(settings, null, processorsRange, memory, storage, version);
         }
     
    -    public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage, Version version) {
    +    @Deprecated
    +    public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage, String version) {
             this(settings, Processors.of(processors), null, memory, storage, version);
         }
     
    +    public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) {
    +        this(settings, null, processorsRange, memory, storage);
    +    }
    +
    +    public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage) {
    +        this(settings, Processors.of(processors), null, memory, storage);
    +    }
    +
    +    DesiredNode(Settings settings, Processors processors, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) {
    +        this(settings, processors, processorsRange, memory, storage, null);
    +    }
    +
         DesiredNode(
             Settings settings,
             Processors processors,
             ProcessorsRange processorsRange,
             ByteSizeValue memory,
             ByteSizeValue storage,
    -        Version version
    +        @Deprecated String version
         ) {
             assert settings != null;
             assert memory != null;
             assert storage != null;
    -        assert version != null;
     
             if (processors == null && processorsRange == null) {
                 throw new IllegalArgumentException(
    @@ -190,10 +203,27 @@ public static DesiredNode readFrom(StreamInput in) throws IOException {
             }
             final var memory = ByteSizeValue.readFrom(in);
             final var storage = ByteSizeValue.readFrom(in);
    -        final var version = Version.readVersion(in);
    +        final String version;
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) {
    +            version = in.readOptionalString();
    +        } else {
    +            version = Version.readVersion(in).toString();
    +        }
             return new DesiredNode(settings, processors, processorsRange, memory, storage, version);
         }
     
    +    private static final Pattern SEMANTIC_VERSION_PATTERN = Pattern.compile("^(\\d+\\.\\d+\\.\\d+)\\D?.*");
    +
    +    private static Version parseLegacyVersion(String version) {
    +        if (version != null) {
    +            var semanticVersionMatcher = SEMANTIC_VERSION_PATTERN.matcher(version);
    +            if (semanticVersionMatcher.matches()) {
    +                return Version.fromString(semanticVersionMatcher.group(1));
    +            }
    +        }
    +        return null;
    +    }
    +
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             settings.writeTo(out);
    @@ -207,7 +237,17 @@ public void writeTo(StreamOutput out) throws IOException {
             }
             memory.writeTo(out);
             storage.writeTo(out);
    -        Version.writeVersion(version, out);
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) {
    +            out.writeOptionalString(version);
    +        } else {
    +            Version parsedVersion = parseLegacyVersion(version);
     +            if (parsedVersion == null) {
    +                // Some node is from before we made the version field not required. If so, fill in with the current node version.
    +                Version.writeVersion(Version.CURRENT, out);
    +            } else {
    +                Version.writeVersion(parsedVersion, out);
    +            }
    +        }
         }
     
         public static DesiredNode fromXContent(XContentParser parser) throws IOException {
    @@ -234,7 +274,14 @@ public void toInnerXContent(XContentBuilder builder, Params params) throws IOExc
             }
             builder.field(MEMORY_FIELD.getPreferredName(), memory);
             builder.field(STORAGE_FIELD.getPreferredName(), storage);
    -        builder.field(VERSION_FIELD.getPreferredName(), version);
    +        addDeprecatedVersionField(builder);
    +    }
    +
    +    @UpdateForV9 // Remove deprecated field from response
    +    private void addDeprecatedVersionField(XContentBuilder builder) throws IOException {
    +        if (version != null) {
    +            builder.field(VERSION_FIELD.getPreferredName(), version);
    +        }
         }
     
         public boolean hasMasterRole() {
    @@ -292,10 +339,6 @@ public ByteSizeValue storage() {
             return storage;
         }
     
    -    public Version version() {
    -        return version;
    -    }
    -
         public String externalId() {
             return externalId;
         }
    @@ -356,8 +399,6 @@ public String toString() {
                 + memory
                 + ", storage="
                 + storage
    -            + ", version="
    -            + version
                 + ", externalId='"
                 + externalId
                 + '\''
    @@ -366,6 +407,10 @@ public String toString() {
                 + '}';
         }
     
    +    public boolean hasVersion() {
    +        return Strings.isNullOrBlank(version) == false;
    +    }
    +
         public record ProcessorsRange(Processors min, @Nullable Processors max) implements Writeable, ToXContentObject {
     
             private static final ParseField MIN_FIELD = new ParseField("min");
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java
    index 9c5710b91966c..92b370dec3a8b 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java
    @@ -10,7 +10,6 @@
     
     import org.elasticsearch.TransportVersion;
     import org.elasticsearch.TransportVersions;
    -import org.elasticsearch.Version;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.common.io.stream.Writeable;
    @@ -45,7 +44,7 @@ public record DesiredNodeWithStatus(DesiredNode desiredNode, Status status)
                     (DesiredNode.ProcessorsRange) args[2],
                     (ByteSizeValue) args[3],
                     (ByteSizeValue) args[4],
    -                (Version) args[5]
    +                (String) args[5]
                 ),
                 // An unknown status is expected during upgrades to versions >= STATUS_TRACKING_SUPPORT_VERSION
                 // the desired node status would be populated when a node in the newer version is elected as
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java
    index 742b52365c8d7..83b1c48e69eb9 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java
    @@ -26,7 +26,6 @@
     import org.elasticsearch.cluster.routing.allocation.IndexMetadataUpdater;
     import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
     import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
    -import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.collect.ImmutableOpenMap;
     import org.elasticsearch.common.compress.CompressedXContent;
     import org.elasticsearch.common.io.stream.StreamInput;
    @@ -138,14 +137,9 @@ public class IndexMetadata implements Diffable, ToXContentFragmen
             EnumSet.of(ClusterBlockLevel.WRITE)
         );
     
    -    // TODO: refactor this method after adding more downsampling metadata
    -    public boolean isDownsampledIndex() {
    -        final String sourceIndex = settings.get(IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME_KEY);
    -        final String indexDownsamplingStatus = settings.get(IndexMetadata.INDEX_DOWNSAMPLE_STATUS_KEY);
    -        final boolean downsamplingSuccess = DownsampleTaskStatus.SUCCESS.name()
    -            .toLowerCase(Locale.ROOT)
    -            .equals(indexDownsamplingStatus != null ? indexDownsamplingStatus.toLowerCase(Locale.ROOT) : DownsampleTaskStatus.UNKNOWN);
    -        return Strings.isNullOrEmpty(sourceIndex) == false && downsamplingSuccess;
    +    @Nullable
    +    public String getDownsamplingInterval() {
    +        return settings.get(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL_KEY);
         }
     
         public enum State implements Writeable {
    @@ -1235,6 +1229,7 @@ public Index getResizeSourceIndex() {
         public static final String INDEX_DOWNSAMPLE_ORIGIN_UUID_KEY = "index.downsample.origin.uuid";
     
         public static final String INDEX_DOWNSAMPLE_STATUS_KEY = "index.downsample.status";
    +    public static final String INDEX_DOWNSAMPLE_INTERVAL_KEY = "index.downsample.interval";
         public static final Setting INDEX_DOWNSAMPLE_SOURCE_UUID = Setting.simpleString(
             INDEX_DOWNSAMPLE_SOURCE_UUID_KEY,
             Property.IndexScope,
    @@ -1277,6 +1272,14 @@ public String toString() {
             Property.InternalIndex
         );
     
    +    public static final Setting INDEX_DOWNSAMPLE_INTERVAL = Setting.simpleString(
    +        INDEX_DOWNSAMPLE_INTERVAL_KEY,
    +        "",
    +        Property.IndexScope,
    +        Property.InternalIndex,
    +        Property.PrivateIndex
    +    );
    +
         // LIFECYCLE_NAME is here an as optimization, see LifecycleSettings.LIFECYCLE_NAME and
         // LifecycleSettings.LIFECYCLE_NAME_SETTING for the 'real' version
         public static final String LIFECYCLE_NAME = "index.lifecycle.name";
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
    index 64b234c8f5d2b..4d76ead90e12a 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
    @@ -234,6 +234,16 @@ default boolean isRestorable() {
     
         private final IndexVersion oldestIndexVersion;
     
    +    // Used in the findAliases and findDataStreamAliases functions
    +    private interface AliasInfoGetter {
     +        List<? extends AliasInfo> get(String entityName);
    +    }
    +
    +    // Used in the findAliases and findDataStreamAliases functions
    +    private interface AliasInfoSetter {
     +        void put(String entityName, List<AliasInfo> aliases);
    +    }
    +
         private Metadata(
             String clusterUUID,
             boolean clusterUUIDCommitted,
    @@ -799,11 +809,63 @@ public Map> findAllAliases(final String[] concreteIn
          * aliases then the result will not include the index's key.
          */
          public Map<String, List<AliasMetadata>> findAliases(final String[] aliases, final String[] concreteIndices) {
     +        ImmutableOpenMap.Builder<String, List<AliasMetadata>> mapBuilder = ImmutableOpenMap.builder();
    +
    +        AliasInfoGetter getter = index -> indices.get(index).getAliases().values().stream().toList();
    +
    +        AliasInfoSetter setter = (index, foundAliases) -> {
     +            List<AliasMetadata> d = new ArrayList<>();
    +            foundAliases.forEach(i -> d.add((AliasMetadata) i));
    +            mapBuilder.put(index, d);
    +        };
    +
    +        findAliasInfo(aliases, concreteIndices, getter, setter);
    +
    +        return mapBuilder.build();
    +    }
    +
    +    /**
    +     * Finds the specific data stream aliases that match with the specified aliases directly or partially via wildcards, and
    +     * that point to the specified data streams (directly or matching data streams via wildcards).
    +     *
    +     * @param aliases The aliases to look for. Might contain include or exclude wildcards.
    +     * @param dataStreams The data streams that the aliases must point to in order to be returned
    +     * @return A map of data stream name to the list of DataStreamAlias objects that match. If a data stream does not have matching
    +     * aliases then the result will not include the data stream's key.
    +     */
    +    public Map> findDataStreamAliases(final String[] aliases, final String[] dataStreams) {
    +        ImmutableOpenMap.Builder> mapBuilder = ImmutableOpenMap.builder();
    +        Map> dataStreamAliases = dataStreamAliasesByDataStream();
    +
    +        AliasInfoGetter getter = dataStream -> dataStreamAliases.getOrDefault(dataStream, Collections.emptyList());
    +
    +        AliasInfoSetter setter = (dataStream, foundAliases) -> {
    +            List dsAliases = new ArrayList<>();
    +            foundAliases.forEach(alias -> dsAliases.add((DataStreamAlias) alias));
    +            mapBuilder.put(dataStream, dsAliases);
    +        };
    +
    +        findAliasInfo(aliases, dataStreams, getter, setter);
    +
    +        return mapBuilder.build();
    +    }
    +
    +    /**
    +     * Find the aliases that point to the specified data streams or indices. Called from findAliases or findDataStreamAliases.
    +     *
    +     * @param aliases The aliases to look for. Might contain include or exclude wildcards.
    +     * @param possibleMatches The data streams or indices that the aliases must point to in order to be returned
    +     * @param getter A function that is used to get the aliases for a given data stream or index
    +     * @param setter A function that is used to keep track of the found aliases
    +     */
    +    private void findAliasInfo(final String[] aliases, final String[] possibleMatches, AliasInfoGetter getter, AliasInfoSetter setter) {
             assert aliases != null;
    -        assert concreteIndices != null;
    -        if (concreteIndices.length == 0) {
    -            return ImmutableOpenMap.of();
    +        assert possibleMatches != null;
    +        if (possibleMatches.length == 0) {
    +            return;
             }
    +
    +        // create patterns to use to search for targets
             String[] patterns = new String[aliases.length];
             boolean[] include = new boolean[aliases.length];
             for (int i = 0; i < aliases.length; i++) {
    @@ -816,14 +878,16 @@ public Map> findAliases(final String[] aliases, fina
                     include[i] = true;
                 }
             }
    +
             boolean matchAllAliases = patterns.length == 0;
    -        ImmutableOpenMap.Builder> mapBuilder = ImmutableOpenMap.builder();
    -        for (String index : concreteIndices) {
    -            IndexMetadata indexMetadata = indices.get(index);
    -            List filteredValues = new ArrayList<>();
    -            for (AliasMetadata aliasMetadata : indexMetadata.getAliases().values()) {
    +
    +        for (String index : possibleMatches) {
    +            List filteredValues = new ArrayList<>();
    +
    +            List entities = getter.get(index);
    +            for (AliasInfo aliasInfo : entities) {
                     boolean matched = matchAllAliases;
    -                String alias = aliasMetadata.alias();
    +                String alias = aliasInfo.getAlias();
                     for (int i = 0; i < patterns.length; i++) {
                         if (include[i]) {
                             if (matched == false) {
    @@ -835,16 +899,15 @@ public Map> findAliases(final String[] aliases, fina
                         }
                     }
                     if (matched) {
    -                    filteredValues.add(aliasMetadata);
    +                    filteredValues.add(aliasInfo);
                     }
                 }
                 if (filteredValues.isEmpty() == false) {
                     // Make the list order deterministic
    -                CollectionUtil.timSort(filteredValues, Comparator.comparing(AliasMetadata::alias));
    -                mapBuilder.put(index, Collections.unmodifiableList(filteredValues));
    +                CollectionUtil.timSort(filteredValues, Comparator.comparing(AliasInfo::getAlias));
    +                setter.put(index, Collections.unmodifiableList(filteredValues));
                 }
             }
    -        return mapBuilder.build();
         }
     
         /**
    @@ -897,6 +960,15 @@ public Map findDataStreams(String... concreteIndices) {
             return builder.build();
         }
     
    +    /**
    +     * Checks whether the provided index is a data stream.
    +     */
    +    public boolean indexIsADataStream(String indexName) {
    +        final SortedMap lookup = getIndicesLookup();
    +        IndexAbstraction abstraction = lookup.get(indexName);
    +        return abstraction != null && abstraction.getType() == IndexAbstraction.Type.DATA_STREAM;
    +    }
    +
         @SuppressWarnings("unchecked")
         private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Predicate fieldPredicate) {
             if (mappingMetadata == null) {
    @@ -1264,6 +1336,25 @@ public Map dataStreamAliases() {
             return this.custom(DataStreamMetadata.TYPE, DataStreamMetadata.EMPTY).getDataStreamAliases();
         }
     
    +    /**
    +     * Return a map of DataStreamAlias objects by DataStream name
    +     * @return a map of DataStreamAlias objects by DataStream name
    +     */
    +    public Map> dataStreamAliasesByDataStream() {
    +        Map> dataStreamAliases = new HashMap<>();
    +
    +        for (DataStreamAlias dsAlias : dataStreamAliases().values()) {
    +            for (String dataStream : dsAlias.getDataStreams()) {
    +                if (dataStreamAliases.containsKey(dataStream) == false) {
    +                    dataStreamAliases.put(dataStream, new ArrayList<>());
    +                }
    +                dataStreamAliases.get(dataStream).add(dsAlias);
    +            }
    +        }
    +
    +        return dataStreamAliases;
    +    }
    +
         public NodesShutdownMetadata nodeShutdowns() {
             return custom(NodesShutdownMetadata.TYPE, NodesShutdownMetadata.EMPTY);
         }
    @@ -2432,7 +2523,7 @@ private static void collectAliasDuplicates(
                             reported = true;
                         }
                     }
    -                // This is for adding an error message for when a data steam alias has the same name as a data stream.
    +                // This is for adding an error message for when a data stream alias has the same name as a data stream.
                     if (reported == false && dataStreamMetadata != null && dataStreamMetadata.dataStreams().containsKey(alias)) {
                         duplicates.add("data stream alias and data stream have the same name (" + alias + ")");
                     }
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java
    index b24e7fbcfefab..6b09b467a017c 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java
    @@ -13,6 +13,7 @@
     import org.elasticsearch.features.NodeFeature;
     
     import java.util.Map;
    +import java.util.Set;
     
     public class MetadataFeatures implements FeatureSpecification {
         @Override
    @@ -24,4 +25,9 @@ public Map getHistoricalFeatures() {
                 Version.V_8_5_0
             );
         }
    +
    +    @Override
    +    public Set getFeatures() {
    +        return Set.of(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED);
    +    }
     }
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java
    index 767516021e6c2..34f71d315f97a 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java
    @@ -654,7 +654,7 @@ private void sendVerifyShardBeforeCloseRequest(
                 if (shardRoutingTable.primaryShard().unassigned()) {
                     logger.debug("primary shard {} is unassigned, ignoring", shardId);
                     final ReplicationResponse response = new ReplicationResponse();
    -                response.setShardInfo(new ReplicationResponse.ShardInfo(shardRoutingTable.size(), shardRoutingTable.size()));
    +                response.setShardInfo(ReplicationResponse.ShardInfo.allSuccessful(shardRoutingTable.size()));
                     listener.onResponse(response);
                     return;
                 }
    @@ -786,7 +786,7 @@ private void sendVerifyShardBlockRequest(
                 if (shardRoutingTable.primaryShard().unassigned()) {
                     logger.debug("primary shard {} is unassigned, ignoring", shardId);
                     final ReplicationResponse response = new ReplicationResponse();
    -                response.setShardInfo(new ReplicationResponse.ShardInfo(shardRoutingTable.size(), shardRoutingTable.size()));
    +                response.setShardInfo(ReplicationResponse.ShardInfo.allSuccessful(shardRoutingTable.size()));
                     listener.onResponse(response);
                     return;
                 }
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
    index 7a2d20d042f84..6307ed768e813 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
    @@ -148,7 +148,11 @@ private static ClusterState applyRequest(
                     // try and parse it (no need to add it here) so we can bail early in case of parsing exception
                     // first, simulate: just call merge and ignore the result
                     Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource);
    -                MapperService.mergeMappings(mapperService.documentMapper(), mapping, MergeReason.MAPPING_UPDATE);
    +                MapperService.mergeMappings(
    +                    mapperService.documentMapper(),
    +                    mapping,
    +                    request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE
    +                );
                 }
                 Metadata.Builder builder = Metadata.builder(metadata);
                 boolean updated = false;
    @@ -167,7 +171,7 @@ private static ClusterState applyRequest(
                     DocumentMapper mergedMapper = mapperService.merge(
                         MapperService.SINGLE_MAPPING_NAME,
                         mappingUpdateSource,
    -                    MergeReason.MAPPING_UPDATE
    +                    request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE
                     );
                     CompressedXContent updatedSource = mergedMapper.mappingSource();
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java
    index aaf256a49a0a5..3453b3b6d70ff 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java
    @@ -35,8 +35,8 @@
     public class SingleNodeShutdownMetadata implements SimpleDiffable, ToXContentObject {
     
         public static final TransportVersion REPLACE_SHUTDOWN_TYPE_ADDED_VERSION = TransportVersions.V_7_16_0;
    -    public static final TransportVersion SIGTERM_ADDED_VERSION = TransportVersions.V_8_500_020;
    -    public static final TransportVersion GRACE_PERIOD_ADDED_VERSION = TransportVersions.V_8_500_020;
    +    public static final TransportVersion SIGTERM_ADDED_VERSION = TransportVersions.V_8_9_X;
    +    public static final TransportVersion GRACE_PERIOD_ADDED_VERSION = TransportVersions.V_8_9_X;
     
         public static final ParseField NODE_ID_FIELD = new ParseField("node_id");
         public static final ParseField TYPE_FIELD = new ParseField("type");
    diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java
    index d36b70b49c6ab..18a99f984707f 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java
    @@ -123,7 +123,7 @@ public Template(StreamInput in) throws IOException {
             }
             if (in.getTransportVersion().onOrAfter(DataStreamLifecycle.ADDED_ENABLED_FLAG_VERSION)) {
                 this.lifecycle = in.readOptionalWriteable(DataStreamLifecycle::new);
    -        } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 boolean isExplicitNull = in.readBoolean();
                 if (isExplicitNull) {
                     this.lifecycle = DataStreamLifecycle.newBuilder().enabled(false).build();
    @@ -177,7 +177,7 @@ public void writeTo(StreamOutput out) throws IOException {
             }
             if (out.getTransportVersion().onOrAfter(DataStreamLifecycle.ADDED_ENABLED_FLAG_VERSION)) {
                 out.writeOptionalWriteable(lifecycle);
    -        } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 boolean isExplicitNull = lifecycle != null && lifecycle.isEnabled() == false;
                 out.writeBoolean(isExplicitNull);
                 if (isExplicitNull == false) {
    diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
    index e77a7b27e1a2c..01b67068db31f 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
    @@ -337,7 +337,7 @@ public DiscoveryNode(StreamInput in) throws IOException {
                 }
             }
             this.roles = Collections.unmodifiableSortedSet(roles);
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) {
                 versionInfo = new VersionInformation(Version.readVersion(in), IndexVersion.readVersion(in), IndexVersion.readVersion(in));
             } else {
                 versionInfo = inferVersionInformation(Version.readVersion(in));
    @@ -374,7 +374,7 @@ public void writeTo(StreamOutput out) throws IOException {
                 o.writeString(role.roleNameAbbreviation());
                 o.writeBoolean(role.canContainData());
             });
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) {
                 Version.writeVersion(versionInfo.nodeVersion(), out);
                 IndexVersion.writeVersion(versionInfo.minIndexVersion(), out);
                 IndexVersion.writeVersion(versionInfo.maxIndexVersion(), out);
    diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java
    index cd2c927d87f69..918056fea9ec6 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java
    @@ -667,7 +667,7 @@ public String shortSummary() {
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeOptionalString(masterNodeId);
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 out.writeVLong(nodeLeftGeneration);
             } // else nodeLeftGeneration is zero, or we're sending this to a remote cluster which does not care about the nodeLeftGeneration
             out.writeCollection(nodes.values());
    @@ -682,7 +682,7 @@ public static DiscoveryNodes readFrom(StreamInput in, DiscoveryNode localNode) t
                 builder.localNodeId(localNode.getId());
             }
     
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 builder.nodeLeftGeneration(in.readVLong());
             } // else nodeLeftGeneration is zero, or we're receiving this from a remote cluster so the nodeLeftGeneration does not matter to us
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java
    index bb0ca372e6a4c..76efc62e3ca06 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java
    @@ -186,10 +186,10 @@ public void onNewInfo(ClusterInfo info) {
                 final String node = entry.getKey();
                 final DiskUsage usage = entry.getValue();
                 final RoutingNode routingNode = routingNodes.node(node);
    -            final ByteSizeValue total = ByteSizeValue.ofBytes(usage.getTotalBytes());
    +            final ByteSizeValue total = ByteSizeValue.ofBytes(usage.totalBytes());
     
                 if (isDedicatedFrozenNode(routingNode)) {
    -                if (usage.getFreeBytes() < diskThresholdSettings.getFreeBytesThresholdFrozenFloodStage(total).getBytes()) {
    +                if (usage.freeBytes() < diskThresholdSettings.getFreeBytesThresholdFrozenFloodStage(total).getBytes()) {
                         logger.warn(
                             "flood stage disk watermark [{}] exceeded on {}",
                             diskThresholdSettings.describeFrozenFloodStageThreshold(total, false),
    @@ -201,7 +201,7 @@ public void onNewInfo(ClusterInfo info) {
                     continue;
                 }
     
    -            if (usage.getFreeBytes() < diskThresholdSettings.getFreeBytesThresholdFloodStage(total).getBytes()) {
    +            if (usage.freeBytes() < diskThresholdSettings.getFreeBytesThresholdFloodStage(total).getBytes()) {
                     nodesOverLowThreshold.add(node);
                     nodesOverHighThreshold.add(node);
                     nodesOverHighThresholdAndRelocating.remove(node);
    @@ -223,7 +223,7 @@ public void onNewInfo(ClusterInfo info) {
                     continue;
                 }
     
    -            if (usage.getFreeBytes() < diskThresholdSettings.getFreeBytesThresholdHighStage(total).getBytes()) {
    +            if (usage.freeBytes() < diskThresholdSettings.getFreeBytesThresholdHighStage(total).getBytes()) {
                     if (routingNode != null) { // might be temporarily null if the ClusterInfoService and the ClusterService are out of step
                         for (ShardRouting routing : routingNode) {
                             String indexName = routing.index().getName();
    @@ -232,16 +232,16 @@ public void onNewInfo(ClusterInfo info) {
                     }
                 }
     
    -            final long reservedSpace = info.getReservedSpace(usage.getNodeId(), usage.getPath()).total();
    +            final long reservedSpace = info.getReservedSpace(usage.nodeId(), usage.path()).total();
                 final DiskUsage usageWithReservedSpace = new DiskUsage(
    -                usage.getNodeId(),
    -                usage.getNodeName(),
    -                usage.getPath(),
    -                usage.getTotalBytes(),
    -                Math.max(0L, usage.getFreeBytes() - reservedSpace)
    +                usage.nodeId(),
    +                usage.nodeName(),
    +                usage.path(),
    +                usage.totalBytes(),
    +                Math.max(0L, usage.freeBytes() - reservedSpace)
                 );
     
    -            if (usageWithReservedSpace.getFreeBytes() < diskThresholdSettings.getFreeBytesThresholdHighStage(total).getBytes()) {
    +            if (usageWithReservedSpace.freeBytes() < diskThresholdSettings.getFreeBytesThresholdHighStage(total).getBytes()) {
                     nodesOverLowThreshold.add(node);
                     nodesOverHighThreshold.add(node);
     
    @@ -258,7 +258,7 @@ public void onNewInfo(ClusterInfo info) {
                         );
                     }
     
    -            } else if (usageWithReservedSpace.getFreeBytes() < diskThresholdSettings.getFreeBytesThresholdLowStage(total).getBytes()) {
    +            } else if (usageWithReservedSpace.freeBytes() < diskThresholdSettings.getFreeBytesThresholdLowStage(total).getBytes()) {
                     nodesOverHighThresholdAndRelocating.remove(node);
     
                     final boolean wasUnderLowThreshold = nodesOverLowThreshold.add(node);
    @@ -321,33 +321,33 @@ public void onNewInfo(ClusterInfo info) {
                         ActionListener.releaseAfter(ActionListener.runAfter(ActionListener.wrap(ignored -> {
                             final var reroutedClusterState = clusterStateSupplier.get();
                             for (DiskUsage diskUsage : usagesOverHighThreshold) {
    -                            final RoutingNode routingNode = reroutedClusterState.getRoutingNodes().node(diskUsage.getNodeId());
    +                            final RoutingNode routingNode = reroutedClusterState.getRoutingNodes().node(diskUsage.nodeId());
                                 final DiskUsage usageIncludingRelocations;
                                 final long relocatingShardsSize;
                                 if (routingNode != null) { // might be temporarily null if ClusterInfoService and ClusterService are out of step
                                     relocatingShardsSize = sizeOfRelocatingShards(routingNode, diskUsage, info, reroutedClusterState);
                                     usageIncludingRelocations = new DiskUsage(
    -                                    diskUsage.getNodeId(),
    -                                    diskUsage.getNodeName(),
    -                                    diskUsage.getPath(),
    -                                    diskUsage.getTotalBytes(),
    -                                    diskUsage.getFreeBytes() - relocatingShardsSize
    +                                    diskUsage.nodeId(),
    +                                    diskUsage.nodeName(),
    +                                    diskUsage.path(),
    +                                    diskUsage.totalBytes(),
    +                                    diskUsage.freeBytes() - relocatingShardsSize
                                     );
                                 } else {
                                     usageIncludingRelocations = diskUsage;
                                     relocatingShardsSize = 0L;
                                 }
    -                            final ByteSizeValue total = ByteSizeValue.ofBytes(usageIncludingRelocations.getTotalBytes());
    +                            final ByteSizeValue total = ByteSizeValue.ofBytes(usageIncludingRelocations.totalBytes());
     
    -                            if (usageIncludingRelocations.getFreeBytes() < diskThresholdSettings.getFreeBytesThresholdHighStage(total)
    +                            if (usageIncludingRelocations.freeBytes() < diskThresholdSettings.getFreeBytesThresholdHighStage(total)
                                     .getBytes()) {
    -                                nodesOverHighThresholdAndRelocating.remove(diskUsage.getNodeId());
    +                                nodesOverHighThresholdAndRelocating.remove(diskUsage.nodeId());
                                     logger.warn("""
                                         high disk watermark [{}] exceeded on {}, shards will be relocated away from this node; currently \
                                         relocating away shards totalling [{}] bytes; the node is expected to continue to exceed the high disk \
                                         watermark when these relocations are complete\
                                         """, diskThresholdSettings.describeHighThreshold(total, false), diskUsage, -relocatingShardsSize);
    -                            } else if (nodesOverHighThresholdAndRelocating.add(diskUsage.getNodeId())) {
    +                            } else if (nodesOverHighThresholdAndRelocating.add(diskUsage.nodeId())) {
                                     logger.info("""
                                         high disk watermark [{}] exceeded on {}, shards will be relocated away from this node; currently \
                                         relocating away shards totalling [{}] bytes; the node is expected to be below the high disk watermark \
    @@ -424,7 +424,7 @@ long sizeOfRelocatingShards(RoutingNode routingNode, DiskUsage diskUsage, Cluste
             return DiskThresholdDecider.sizeOfUnaccountedShards(
                 routingNode,
                 true,
    -            diskUsage.getPath(),
    +            diskUsage.path(),
                 info,
                 SnapshotShardSizeInfo.EMPTY,
                 reroutedClusterState.metadata(),
    diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java
    index 4549858c2508b..569335cc65a5d 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java
    @@ -79,9 +79,11 @@ public static final class Rebalance {
                 activities. The shard will be rebalanced when those activities finish. Please wait.""";
     
             public static final String CANNOT_REBALANCE_CAN_ALLOCATE = """
    -            Elasticsearch is allowed to allocate this shard to another node but it isn't allowed to rebalance the shard there. If you \
    -            expect this shard to be rebalanced to another node, find this node in the node-by-node explanation and address the reasons \
    -            which prevent Elasticsearch from rebalancing this shard there.""";
    +            Elasticsearch is allowed to allocate this shard on another node, and there is at least one node to which it could move this \
    +            shard that would improve the overall cluster balance, but it isn't allowed to rebalance this shard there. If you expect this \
    +            shard to be rebalanced to another node, check the cluster-wide rebalancing decisions and address any reasons preventing \
    +            Elasticsearch from rebalancing shards within the cluster, and then find the expected node in the node-by-node explanation and \
    +            address the reasons which prevent Elasticsearch from moving this shard there.""";
     
             public static final String CANNOT_REBALANCE_CANNOT_ALLOCATE = """
                 Elasticsearch is not allowed to allocate or rebalance this shard to another node. If you expect this shard to be rebalanced to \
    diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java
    index 4e674648bc3a4..3c0125272b094 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java
    @@ -160,7 +160,7 @@ private static Map unaccountedSearchableSnapshotSizes(ClusterState
             if (clusterInfo != null) {
                 for (RoutingNode node : clusterState.getRoutingNodes()) {
                     DiskUsage usage = clusterInfo.getNodeMostAvailableDiskUsages().get(node.nodeId());
    -                ClusterInfo.ReservedSpace reservedSpace = clusterInfo.getReservedSpace(node.nodeId(), usage != null ? usage.getPath() : "");
    +                ClusterInfo.ReservedSpace reservedSpace = clusterInfo.getReservedSpace(node.nodeId(), usage != null ? usage.path() : "");
                     long totalSize = 0;
                     for (ShardRouting shard : node.started()) {
                         if (shard.getExpectedShardSize() > 0
    diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
    index 22bed76fa2b2e..952b2fbf8f2e2 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
    @@ -472,9 +472,9 @@ private static DiskUsageWithRelocations getDiskUsage(
                 logger.debug(
                     "unable to determine disk usage for {}, defaulting to average across nodes [{} total] [{} free] [{}% free]",
                     node.nodeId(),
    -                usage.getTotalBytes(),
    -                usage.getFreeBytes(),
    -                usage.getFreeDiskAsPercentage()
    +                usage.totalBytes(),
    +                usage.freeBytes(),
    +                usage.freeDiskAsPercentage()
                 );
             }
     
    @@ -483,7 +483,7 @@ private static DiskUsageWithRelocations getDiskUsage(
                 sizeOfUnaccountedShards(
                     node,
                     subtractLeavingShards,
    -                usage.getPath(),
    +                usage.path(),
                     allocation.clusterInfo(),
                     allocation.snapshotShardSizeInfo(),
                     allocation.metadata(),
    @@ -509,8 +509,8 @@ static DiskUsage averageUsage(RoutingNode node, Map usages) {
             long totalBytes = 0;
             long freeBytes = 0;
             for (DiskUsage du : usages.values()) {
    -            totalBytes += du.getTotalBytes();
    -            freeBytes += du.getFreeBytes();
    +            totalBytes += du.totalBytes();
    +            freeBytes += du.freeBytes();
             }
             return new DiskUsage(node.nodeId(), node.node().getName(), "_na_", totalBytes / usages.size(), freeBytes / usages.size());
         }
    @@ -548,18 +548,18 @@ record DiskUsageWithRelocations(DiskUsage diskUsage, long relocatingShardSize) {
     
             long getFreeBytes() {
                 try {
    -                return Math.subtractExact(diskUsage.getFreeBytes(), relocatingShardSize);
    +                return Math.subtractExact(diskUsage.freeBytes(), relocatingShardSize);
                 } catch (ArithmeticException e) {
                     return Long.MAX_VALUE;
                 }
             }
     
             String getPath() {
    -            return diskUsage.getPath();
    +            return diskUsage.path();
             }
     
             long getTotalBytes() {
    -            return diskUsage.getTotalBytes();
    +            return diskUsage.totalBytes();
             }
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java
    index 2d3be237c8e94..ebc725b22c9e1 100644
    --- a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java
    +++ b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java
    @@ -101,7 +101,7 @@ public static CompatibilityVersions readVersion(StreamInput in) throws IOExcepti
             TransportVersion transportVersion = TransportVersion.readVersion(in);
     
             Map mappingsVersions = Map.of();
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.COMPAT_VERSIONS_MAPPING_VERSION_ADDED)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) {
                 mappingsVersions = in.readMap(SystemIndexDescriptor.MappingsVersion::new);
             }
     
    @@ -112,7 +112,7 @@ public static CompatibilityVersions readVersion(StreamInput in) throws IOExcepti
         public void writeTo(StreamOutput out) throws IOException {
             TransportVersion.writeVersion(this.transportVersion(), out);
     
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.COMPAT_VERSIONS_MAPPING_VERSION_ADDED)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) {
                 out.writeMap(this.systemIndexMappingsVersion(), (o, v) -> v.writeTo(o));
             }
         }
    diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java
    index 80c969cc1b084..67a9e23f2297f 100644
    --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java
    +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java
    @@ -69,6 +69,7 @@ public enum ReferenceDocs {
         BOOTSTRAP_CHECK_TLS,
         BOOTSTRAP_CHECK_TOKEN_SSL,
         BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP,
    +    CONTACT_SUPPORT,
         // this comment keeps the ';' on the next line so every entry above has a trailing ',' which makes the diff for adding new links cleaner
         ;
     
    diff --git a/server/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java b/server/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java
    index 33bd1bd927605..f7f7f520fec90 100644
    --- a/server/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java
    +++ b/server/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java
    @@ -35,6 +35,8 @@ class TimeBasedUUIDGenerator implements UUIDGenerator {
             assert SECURE_MUNGED_ADDRESS.length == 6;
         }
     
    +    private static final Base64.Encoder BASE_64_NO_PADDING = Base64.getUrlEncoder().withoutPadding();
    +
         // protected for testing
         protected long currentTimeMillis() {
             return System.currentTimeMillis();
    @@ -48,22 +50,16 @@ protected byte[] macAddress() {
         @Override
         public String getBase64UUID() {
             final int sequenceId = sequenceNumber.incrementAndGet() & 0xffffff;
    -        long currentTimeMillis = currentTimeMillis();
    -
    -        long timestamp = this.lastTimestamp.updateAndGet(lastTimestamp -> {
    -            // Don't let timestamp go backwards, at least "on our watch" (while this JVM is running). We are
    -            // still vulnerable if we are shut down, clock goes backwards, and we restart... for this we
    -            // randomize the sequenceNumber on init to decrease chance of collision:
    -            long nonBackwardsTimestamp = Math.max(lastTimestamp, currentTimeMillis);
    -
    -            if (sequenceId == 0) {
    -                // Always force the clock to increment whenever sequence number is 0, in case we have a long
    -                // time-slip backwards:
    -                nonBackwardsTimestamp++;
    -            }
     
    -            return nonBackwardsTimestamp;
    -        });
    +        // Don't let timestamp go backwards, at least "on our watch" (while this JVM is running). We are
    +        // still vulnerable if we are shut down, clock goes backwards, and we restart... for this we
    +        // randomize the sequenceNumber on init to decrease chance of collision:
    +        long timestamp = this.lastTimestamp.accumulateAndGet(
    +            currentTimeMillis(),
    +            // Always force the clock to increment whenever sequence number is 0, in case we have a long
    +            // time-slip backwards:
    +            sequenceId == 0 ? (lastTimestamp, currentTimeMillis) -> Math.max(lastTimestamp, currentTimeMillis) + 1 : Math::max
    +        );
     
             final byte[] uuidBytes = new byte[15];
             int i = 0;
    @@ -106,6 +102,6 @@ public String getBase64UUID() {
     
             assert i == uuidBytes.length;
     
    -        return Base64.getUrlEncoder().withoutPadding().encodeToString(uuidBytes);
    +        return BASE_64_NO_PADDING.encodeToString(uuidBytes);
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java
    index 09ccab35d1e43..b63d722df9b4e 100644
    --- a/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java
    +++ b/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java
    @@ -148,16 +148,17 @@ public BytesReference slice(int from, int length) {
             // for slices we only need to find the start and the end reference
             // adjust them and pass on the references in between as they are fully contained
             final int to = from + length;
    -        final int limit = getOffsetIndex(to - 1);
             final int start = getOffsetIndex(from);
    -        final BytesReference[] inSlice = new BytesReference[1 + (limit - start)];
    -        for (int i = 0, j = start; i < inSlice.length; i++) {
    -            inSlice[i] = references[j++];
    +        int limit = start;
    +        for (int i = start + 1; i < offsets.length && offsets[i] < to; i++) {
    +            limit = i;
             }
             int inSliceOffset = from - offsets[start];
    -        if (inSlice.length == 1) {
    -            return inSlice[0].slice(inSliceOffset, length);
    +        if (start == limit) {
    +            return references[start].slice(inSliceOffset, length);
             }
    +        final BytesReference[] inSlice = new BytesReference[1 + (limit - start)];
    +        System.arraycopy(references, start, inSlice, 0, inSlice.length);
             // now adjust slices in front and at the end
             inSlice[0] = inSlice[0].slice(inSliceOffset, inSlice[0].length() - inSliceOffset);
             inSlice[inSlice.length - 1] = inSlice[inSlice.length - 1].slice(0, to - offsets[limit]);
    diff --git a/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java
    index 905373f9400f6..567f39d968200 100644
    --- a/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java
    +++ b/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java
    @@ -32,7 +32,6 @@ public final class ReleasableBytesReference implements RefCounted, Releasable, B
         private final RefCounted refCounted;
     
         public static ReleasableBytesReference empty() {
    -        EMPTY.incRef();
             return EMPTY;
         }
     
    @@ -147,6 +146,9 @@ public StreamInput streamInput() throws IOException {
             assert hasReferences();
             return new BytesReferenceStreamInput(this) {
                 private ReleasableBytesReference retainAndSkip(int len) throws IOException {
    +                if (len == 0) {
    +                    return ReleasableBytesReference.empty();
    +                }
                     // instead of reading the bytes from a stream we just create a slice of the underlying bytes
                     final ReleasableBytesReference result = retainedSlice(offset(), len);
                     // move the stream manually since creating the slice didn't move it
    @@ -156,7 +158,7 @@ private ReleasableBytesReference retainAndSkip(int len) throws IOException {
     
                 @Override
                 public ReleasableBytesReference readReleasableBytesReference() throws IOException {
    -                final int len = readArraySize();
    +                final int len = readVInt();
                     return retainAndSkip(len);
                 }
     
    diff --git a/server/src/main/java/org/elasticsearch/common/geo/GenericPointParser.java b/server/src/main/java/org/elasticsearch/common/geo/GenericPointParser.java
    index 681849a9851ec..7c12b1f2cce29 100644
    --- a/server/src/main/java/org/elasticsearch/common/geo/GenericPointParser.java
    +++ b/server/src/main/java/org/elasticsearch/common/geo/GenericPointParser.java
    @@ -30,7 +30,6 @@ public abstract class GenericPointParser {
         private static final String X_FIELD = "x";
         private static final String Y_FIELD = "y";
         private static final String Z_FIELD = "z";
    -    private static final String GEOHASH = "geohash";
         private static final String TYPE = "type";
         private static final String COORDINATES = "coordinates";
         private final Map> fields;
    @@ -112,9 +111,8 @@ List parseField(XContentSubParser subParser) throws IOException {
          * @param mapType whether the parser is for 'geo_point' or 'point'
          * @param xField the name of the first coordinate when constructing points (either 'x' or 'lat')
          * @param yField the name of the second coordinate when constructing points (either 'y' or 'lon')
    -     * @param supportGeohash whether to support parsing geohash values (only geo_point supports this currently)
          */
    -    public GenericPointParser(String mapType, String xField, String yField, boolean supportGeohash) {
    +    public GenericPointParser(String mapType, String xField, String yField) {
             this.mapType = mapType;
             this.xField = xField;
             this.yField = yField;
    @@ -124,9 +122,6 @@ public GenericPointParser(String mapType, String xField, String yField, boolean
             fields.put(Z_FIELD, new DoubleFieldParser(Z_FIELD, Z_FIELD));
             fields.put(TYPE, new StringFieldParser(TYPE));
             fields.put(COORDINATES, new DoubleArrayFieldParser(COORDINATES));
    -        if (supportGeohash) {
    -            fields.put(GEOHASH, new StringFieldParser(GEOHASH));
    -        }
         }
     
         public abstract void assertZValue(boolean ignoreZValue, double zValue);
    @@ -142,11 +137,10 @@ public GenericPointParser(String mapType, String xField, String yField, boolean
          * @param ignoreZValue {@link XContentParser} to not throw an error if 3 dimensional data is provided
          * @return new Point parsed from the parser
          */
    -    public T parsePoint(XContentParser parser, boolean ignoreZValue, Function fromString, Function fromGeohash)
    -        throws IOException, ElasticsearchParseException {
    +    public T parsePoint(XContentParser parser, boolean ignoreZValue, Function fromString) throws IOException,
    +        ElasticsearchParseException {
             double x = Double.NaN;
             double y = Double.NaN;
    -        String geohash = null;
             String geojsonType = null;
             List coordinates = null;
     
    @@ -162,7 +156,6 @@ public T parsePoint(XContentParser parser, boolean ignoreZValue, Function x = (Double) fieldParser.parseField(subParser);
                                     case Y_FIELD -> y = (Double) fieldParser.parseField(subParser);
                                     case Z_FIELD -> assertZValue(ignoreZValue, (Double) fieldParser.parseField(subParser));
    -                                case GEOHASH -> geohash = (String) fieldParser.parseField(subParser);
                                     case TYPE -> geojsonType = (String) fieldParser.parseField(subParser);
                                     case COORDINATES -> coordinates = ((DoubleArrayFieldParser) fieldParser).parseField(subParser);
                                 }
    @@ -175,16 +168,7 @@ public T parsePoint(XContentParser parser, boolean ignoreZValue, Function();
    -        if (geohash) found.add("geohash");
    -        if (xy) found.add(xField + "/" + yField);
    -        if (geojson) found.add("GeoJSON");
    -        if (found.size() > 1) {
    -            throw new ElasticsearchParseException("fields matching more than one point format found: {}", found);
    -        } else if (geohash) {
    -            if (x || y || type || coordinates) {
    -                throw new ElasticsearchParseException(fieldError());
    -            }
    -        } else if (found.size() == 0) {
    +    private void assertOnlyOneFormat(boolean x, boolean y, boolean coordinates, boolean type) {
    +        final boolean xy = x && y;
    +        final boolean geojson = coordinates && type;
    +        if (xy && geojson) {
    +            throw new ElasticsearchParseException("fields matching more than one point format");
    +        } else if ((xy || geojson) == false) {
                 if (x) {
                     throw new ElasticsearchParseException("Required [{}]", yField);
                 } else if (y) {
    diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java b/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java
    index 7b354bfa767f2..467dfa5aa3170 100644
    --- a/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java
    +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java
    @@ -100,7 +100,7 @@ public final String getWriteableName() {
     
         @Override
         public final TransportVersion getMinimalSupportedVersion() {
    -        return TransportVersions.GENERIC_NAMED_WRITABLE_ADDED;
    +        return TransportVersions.V_8_11_X;
         }
     
         protected static class GeoBoundsParser extends BoundsParser {
    diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
    index ceff7bf41c587..61f1641e9d312 100644
    --- a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
    +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
    @@ -410,18 +410,14 @@ public static GeoPoint parseGeoPoint(XContentParser parser, final boolean ignore
          */
         public static GeoPoint parseGeoPoint(XContentParser parser, final boolean ignoreZValue, final EffectivePoint effectivePoint)
             throws IOException, ElasticsearchParseException {
    -        return geoPointParser.parsePoint(parser, ignoreZValue, value -> {
    -            GeoPoint point = new GeoPoint();
    -            point.resetFromString(value, ignoreZValue, effectivePoint);
    -            return point;
    -        }, value -> {
    -            GeoPoint point = new GeoPoint();
    -            point.parseGeoHash(value, effectivePoint);
    -            return point;
    -        });
    -    }
    -
    -    private static GenericPointParser geoPointParser = new GenericPointParser<>("geo_point", "lon", "lat", true) {
    +        return geoPointParser.parsePoint(
    +            parser,
    +            ignoreZValue,
    +            value -> new GeoPoint().resetFromString(value, ignoreZValue, effectivePoint)
    +        );
    +    }
    +
    +    private static final GenericPointParser geoPointParser = new GenericPointParser<>("geo_point", "lon", "lat") {
     
             @Override
             public void assertZValue(boolean ignoreZValue, double zValue) {
    diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
    index 693d8efb18347..a0b62bdabc08b 100644
    --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
    +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
    @@ -49,7 +49,7 @@
     import java.util.function.IntFunction;
     
     import static java.util.Map.entry;
    -import static org.elasticsearch.TransportVersions.GENERIC_NAMED_WRITABLE_ADDED;
    +import static org.elasticsearch.TransportVersions.V_8_11_X;
     
     /**
      * A stream from another node to this node. Technically, it can also be streamed from a byte array but that is mostly for testing.
    @@ -768,8 +768,7 @@ public final void writeOptionalInstant(@Nullable Instant instant) throws IOExcep
                 // Note that we do not rely on the checks in VersionCheckingStreamOutput because that only applies to CCS
                 final var genericNamedWriteable = (GenericNamedWriteable) v;
                 TransportVersion minSupportedVersion = genericNamedWriteable.getMinimalSupportedVersion();
    -            assert minSupportedVersion.onOrAfter(GENERIC_NAMED_WRITABLE_ADDED)
    -                : "[GenericNamedWriteable] requires [" + GENERIC_NAMED_WRITABLE_ADDED + "]";
    +            assert minSupportedVersion.onOrAfter(V_8_11_X) : "[GenericNamedWriteable] requires [" + V_8_11_X + "]";
                 if (o.getTransportVersion().before(minSupportedVersion)) {
                     final var message = Strings.format(
                         "[%s] requires minimal transport version [%s] and cannot be sent using transport version [%s]",
    diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java b/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java
    index d3422c1b51a22..5da8c282ad4bc 100644
    --- a/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java
    +++ b/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java
    @@ -8,8 +8,6 @@
     
     package org.elasticsearch.common.io.stream;
     
    -import org.elasticsearch.action.support.TransportAction;
    -
     import java.io.IOException;
     
     /**
    @@ -27,7 +25,7 @@ public interface Writeable {
         /**
          * Reference to a method that can write some object to a {@link StreamOutput}.
          * 

    - * By convention this is a method from {@link StreamOutput} itself (e.g., {@link StreamOutput#writeString}). If the value can be + * By convention this is a method from {@link StreamOutput} itself (e.g., {@link StreamOutput#writeString(String)}. If the value can be * {@code null}, then the "optional" variant of methods should be used! *

    * Most classes should implement {@link Writeable} and the {@link Writeable#writeTo(StreamOutput)} method should use @@ -75,14 +73,6 @@ interface Reader { * @param in Input to read the value from */ V read(StreamInput in) throws IOException; - - /** - * A {@link Reader} which must never be called, for use in local-only transport actions. See also {@link TransportAction#localOnly}. - */ - // TODO remove this when https://github.com/elastic/elasticsearch/issues/100111 is resolved - static Reader localOnly() { - return in -> TransportAction.localOnly(); - } } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index f7c9e72d36326..c1b8d51c255db 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -76,6 +76,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_NAME, IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_UUID, IndexMetadata.INDEX_DOWNSAMPLE_STATUS, + IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL, SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING, SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING, SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index f40534749f017..f6dd5532a3aea 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -1073,6 +1073,9 @@ public Map getAsMap(Settings settings) { matchStream(settings).distinct().forEach(key -> { String namespace = this.key.getNamespace(key); Setting concreteSetting = getConcreteSetting(namespace, key); + if 
(map.containsKey(namespace) && this.key.isFallback(key)) { + return; + } map.put(namespace, concreteSetting.get(settings)); }); return Collections.unmodifiableMap(map); @@ -2053,7 +2056,20 @@ public int hashCode() { */ public static AffixSetting prefixKeySetting(String prefix, Function> delegateFactory) { BiFunction> delegateFactoryWithNamespace = (ns, k) -> delegateFactory.apply(k); - return affixKeySetting(new AffixKey(prefix), delegateFactoryWithNamespace); + return affixKeySetting(new AffixKey(prefix, null, null), delegateFactoryWithNamespace); + } + + /** + * Same as above but also matches the fallback prefix in addition to the prefix of the setting. + * @param nsDelegateFactory instantiate a setting given the namespace and the qualified key + */ + public static AffixSetting prefixKeySetting( + String prefix, + String fallbackPrefix, + BiFunction> nsDelegateFactory + ) { + Setting delegate = nsDelegateFactory.apply("_na_", "_na_"); + return new AffixSetting<>(new AffixKey(prefix, null, fallbackPrefix), delegate, nsDelegateFactory); } /** @@ -2068,7 +2084,7 @@ public static AffixSetting affixKeySetting( AffixSettingDependency... dependencies ) { BiFunction> delegateFactoryWithNamespace = (ns, k) -> delegateFactory.apply(k); - return affixKeySetting(new AffixKey(prefix, suffix), delegateFactoryWithNamespace, dependencies); + return affixKeySetting(new AffixKey(prefix, suffix, null), delegateFactoryWithNamespace, dependencies); } public static AffixSetting affixKeySetting( @@ -2078,7 +2094,7 @@ public static AffixSetting affixKeySetting( AffixSettingDependency... 
dependencies ) { Setting delegate = delegateFactory.apply("_na_", "_na_"); - return new AffixSetting<>(new AffixKey(prefix, suffix), delegate, delegateFactory, dependencies); + return new AffixSetting<>(new AffixKey(prefix, suffix, null), delegate, delegateFactory, dependencies); } private static AffixSetting affixKeySetting( @@ -2159,28 +2175,39 @@ public boolean match(String toTest) { */ public static final class AffixKey implements Key { private final Pattern pattern; + private final Pattern fallbackPattern; private final String prefix; private final String suffix; - + private final String fallbackPrefix; private final String keyString; - AffixKey(String prefix) { - this(prefix, null); - } - - AffixKey(String prefix, String suffix) { + AffixKey(String prefix, String suffix, String fallbackPrefix) { assert prefix != null || suffix != null : "Either prefix or suffix must be non-null"; + assert fallbackPrefix == null || prefix != null : "prefix must be non-null if fallbackPrefix is non-null"; this.prefix = prefix; if (prefix.endsWith(".") == false) { throw new IllegalArgumentException("prefix must end with a '.'"); } + + String prefixPattern; + this.fallbackPrefix = fallbackPrefix; + if (fallbackPrefix != null) { + if (fallbackPrefix.endsWith(".") == false) { + throw new IllegalArgumentException("prefix must end with a '.'"); + } + fallbackPattern = Pattern.compile("(" + Pattern.quote(fallbackPrefix) + ")" + "((?:[-\\w]+[.])*[-\\w]+$)"); + prefixPattern = "(" + Pattern.quote(prefix) + "|" + Pattern.quote(fallbackPrefix) + ")"; + } else { + fallbackPattern = null; + prefixPattern = "(" + Pattern.quote(prefix) + ")"; + } this.suffix = suffix; if (suffix == null) { - pattern = Pattern.compile("(" + Pattern.quote(prefix) + "((?:[-\\w]+[.])*[-\\w]+$))"); + pattern = Pattern.compile("(" + prefixPattern + "((?:[-\\w]+[.])*[-\\w]+$))"); } else { // the last part of this regexp is to support both list and group keys - pattern = Pattern.compile("(" + Pattern.quote(prefix) + 
"([-\\w]+)\\." + Pattern.quote(suffix) + ")(?:\\..*)?"); + pattern = Pattern.compile("(" + prefixPattern + "([-\\w]+)\\." + Pattern.quote(suffix) + ")(?:\\..*)?"); } StringBuilder sb = new StringBuilder(); sb.append(prefix); @@ -2197,6 +2224,20 @@ public boolean match(String key) { return pattern.matcher(key).matches(); } + /** + * Does this key have a fallback prefix? + */ + private boolean hasFallback() { + return fallbackPattern != null; + } + + /** + * Does the key start with the fallback prefix? + */ + public boolean isFallback(String key) { + return hasFallback() && fallbackPattern.matcher(key).matches(); + } + /** * Returns a string representation of the concrete setting key */ @@ -2209,14 +2250,14 @@ String getConcreteString(String key) { } /** - * Returns a string representation of the concrete setting key + * Returns a string representation of the namespace, without prefix and suffix, of the affix key */ String getNamespace(String key) { Matcher matcher = pattern.matcher(key); if (matcher.matches() == false) { - throw new IllegalStateException("can't get concrete string for key " + key + " key doesn't match"); + throw new IllegalStateException("can't get namespace for key " + key + " key doesn't match"); } - return Settings.internKeyOrValue(matcher.group(2)); + return Settings.internKeyOrValue(matcher.group(3)); } public SimpleKey toConcreteKey(String missingPart) { @@ -2242,12 +2283,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AffixKey that = (AffixKey) o; - return Objects.equals(prefix, that.prefix) && Objects.equals(suffix, that.suffix); + return Objects.equals(prefix, that.prefix) + && Objects.equals(suffix, that.suffix) + && Objects.equals(fallbackPrefix, that.fallbackPrefix); } @Override public int hashCode() { - return Objects.hash(prefix, suffix); + return Objects.hash(prefix, suffix, fallbackPrefix); } } } diff --git 
a/server/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java index e19d828648782..eac85bfbf5740 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java @@ -8,7 +8,6 @@ package org.elasticsearch.common.util.concurrent; -import java.util.Collections; import java.util.Deque; import java.util.Queue; import java.util.Set; @@ -46,7 +45,7 @@ public static ConcurrentMap newConcurrentMap() { } public static Set newConcurrentSet() { - return Collections.newSetFromMap(ConcurrentCollections.newConcurrentMap()); + return ConcurrentHashMap.newKeySet(); } public static Queue newQueue() { diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsAbortPolicy.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsAbortPolicy.java index 192769591aded..52bd736f2bcf4 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsAbortPolicy.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsAbortPolicy.java @@ -8,26 +8,27 @@ package org.elasticsearch.common.util.concurrent; -import java.util.concurrent.BlockingQueue; import java.util.concurrent.ThreadPoolExecutor; public class EsAbortPolicy extends EsRejectedExecutionHandler { @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { - if (r instanceof AbstractRunnable) { - if (((AbstractRunnable) r).isForceExecution()) { - BlockingQueue queue = executor.getQueue(); - if ((queue instanceof SizeBlockingQueue) == false) { - throw new IllegalStateException("forced execution, but expected a size queue"); + if (r instanceof AbstractRunnable abstractRunnable) { + if (abstractRunnable.isForceExecution()) { + if (executor.getQueue() instanceof SizeBlockingQueue sizeBlockingQueue) { + try { 
+ sizeBlockingQueue.forcePut(r); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IllegalStateException("forced execution, but got interrupted", e); + } + if ((executor.isShutdown() && sizeBlockingQueue.remove(r)) == false) { + return; + } // else fall through and reject the task since the executor is shut down + } else { + throw new IllegalStateException("expected but did not find SizeBlockingQueue: " + executor); } - try { - ((SizeBlockingQueue) queue).forcePut(r); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new IllegalStateException("forced execution, but got interrupted", e); - } - return; } } incrementRejections(); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index d7774d5c0a7ea..5fcb4684d3f8d 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -126,11 +126,14 @@ public static EsThreadPoolExecutor newFixed( ThreadContext contextHolder, TaskTrackingConfig config ) { - BlockingQueue queue; + final BlockingQueue queue; + final EsRejectedExecutionHandler rejectedExecutionHandler; if (queueCapacity < 0) { queue = ConcurrentCollections.newBlockingQueue(); + rejectedExecutionHandler = new RejectOnShutdownOnlyPolicy(); } else { queue = new SizeBlockingQueue<>(ConcurrentCollections.newBlockingQueue(), queueCapacity); + rejectedExecutionHandler = new EsAbortPolicy(); } if (config.trackExecutionTime()) { return new TaskExecutionTimeTrackingEsThreadPoolExecutor( @@ -142,7 +145,7 @@ public static EsThreadPoolExecutor newFixed( queue, TimedRunnable::new, threadFactory, - new EsAbortPolicy(), + rejectedExecutionHandler, contextHolder, config ); @@ -155,7 +158,7 @@ public static EsThreadPoolExecutor newFixed( TimeUnit.MILLISECONDS, queue, 
threadFactory, - new EsAbortPolicy(), + rejectedExecutionHandler, contextHolder ); } @@ -329,6 +332,29 @@ public boolean offer(E e) { } } + // Overridden to workaround a JDK bug introduced in JDK 21.0.2 + // https://bugs.openjdk.org/browse/JDK-8323659 + @Override + public void put(E e) { + // As the queue is unbounded, this method will always add to the queue. + super.offer(e); + } + + // Overridden to workaround a JDK bug introduced in JDK 21.0.2 + // https://bugs.openjdk.org/browse/JDK-8323659 + @Override + public boolean add(E e) { + // As the queue is unbounded, this method will never return false. + return super.offer(e); + } + + // Overridden to workaround a JDK bug introduced in JDK 21.0.2 + // https://bugs.openjdk.org/browse/JDK-8323659 + @Override + public boolean offer(E e, long timeout, TimeUnit unit) { + // As the queue is unbounded, this method will never return false. + return super.offer(e); + } } /** @@ -388,6 +414,15 @@ private void reject(ThreadPoolExecutor executor, Runnable task) { } } + static class RejectOnShutdownOnlyPolicy extends EsRejectedExecutionHandler { + @Override + public void rejectedExecution(Runnable task, ThreadPoolExecutor executor) { + assert executor.isShutdown() : executor; + incrementRejections(); + throw newRejectedException(task, executor, true); + } + } + public static class TaskTrackingConfig { // This is a random starting point alpha. 
TODO: revisit this with actual testing and/or make it configurable public static double DEFAULT_EWMA_ALPHA = 0.3; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionHandler.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionHandler.java index 3878a4a2dff9d..9457773eb8071 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionHandler.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionHandler.java @@ -9,6 +9,8 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadPoolExecutor; @@ -16,6 +18,7 @@ public abstract class EsRejectedExecutionHandler implements RejectedExecutionHandler { private final CounterMetric rejected = new CounterMetric(); + private LongCounter rejectionCounter = null; /** * The number of rejected executions. 
@@ -26,6 +29,14 @@ public long rejected() { protected void incrementRejections() { rejected.inc(); + if (rejectionCounter != null) { + rejectionCounter.increment(); + } + } + + public void registerCounter(MeterRegistry meterRegistry, String prefix, String name) { + rejectionCounter = meterRegistry.registerLongCounter(prefix + ".rejected.total", "number of rejected threads for " + name, "count"); + rejectionCounter.incrementBy(rejected()); } protected static EsRejectedExecutionException newRejectedException( diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java index 58b67b00b6f30..7470c8af4532c 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java @@ -10,9 +10,10 @@ import org.elasticsearch.core.Releasable; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.VarHandle; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; /** @@ -40,8 +41,8 @@ public Releasable acquire(T key) { return newLock; } } else { - int i = perNodeLock.count.get(); - if (i > 0 && perNodeLock.count.compareAndSet(i, i + 1)) { + int i = perNodeLock.count; + if (i > 0 && perNodeLock.tryIncCount(i)) { perNodeLock.lock(); return new ReleasableLock(key, perNodeLock); } @@ -59,11 +60,11 @@ public Releasable tryAcquire(T key) { } if (perNodeLock.tryLock()) { // ok we got it - make sure we increment it accordingly otherwise release it again int i; - while ((i = perNodeLock.count.get()) > 0) { + while ((i = perNodeLock.count) > 0) { // we have to do this in a loop here since even if the count is > 0 // there could be a concurrent blocking acquire that changes the count 
and then this CAS fails. Since we already got // the lock we should retry and see if we can still get it or if the count is 0. If that is the case and we give up. - if (perNodeLock.count.compareAndSet(i, i + 1)) { + if (perNodeLock.tryIncCount(i)) { return new ReleasableLock(key, perNodeLock); } } @@ -95,7 +96,7 @@ public boolean isHeldByCurrentThread(T key) { private void release(T key, KeyLock lock) { assert lock == map.get(key); - final int decrementAndGet = lock.count.decrementAndGet(); + final int decrementAndGet = lock.decCountAndGet(); lock.unlock(); if (decrementAndGet == 0) { map.remove(key, lock); @@ -103,30 +104,54 @@ private void release(T key, KeyLock lock) { assert decrementAndGet >= 0 : decrementAndGet + " must be >= 0 but wasn't"; } - private final class ReleasableLock extends AtomicBoolean implements Releasable { - final T key; + private final class ReleasableLock extends AtomicReference implements Releasable { final KeyLock lock; private ReleasableLock(T key, KeyLock lock) { - this.key = key; + super(key); this.lock = lock; } @Override public void close() { - if (compareAndSet(false, true)) { - release(key, lock); + T k = getAndSet(null); + if (k != null) { + release(k, lock); } } } - @SuppressWarnings("serial") private static final class KeyLock extends ReentrantLock { + private static final VarHandle VH_COUNT_FIELD; + + static { + try { + VH_COUNT_FIELD = MethodHandles.lookup().in(KeyLock.class).findVarHandle(KeyLock.class, "count", int.class); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + @SuppressWarnings("FieldMayBeFinal") // updated via VH_COUNT_FIELD (and _only_ via VH_COUNT_FIELD) + private volatile int count = 1; + KeyLock() { super(); } - private final AtomicInteger count = new AtomicInteger(1); + int decCountAndGet() { + do { + int i = count; + int newCount = i - 1; + if (VH_COUNT_FIELD.weakCompareAndSet(this, i, newCount)) { + return newCount; + } + } while (true); + } + + 
boolean tryIncCount(int expectedCount) { + return VH_COUNT_FIELD.compareAndSet(this, expectedCount, expectedCount + 1); + } } /** diff --git a/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java b/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java index ec315f5200978..83660cede004e 100644 --- a/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java +++ b/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java @@ -12,9 +12,11 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.coordination.ClusterFormationFailureHelper; import org.elasticsearch.cluster.coordination.PeersResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -24,7 +26,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequestOptions; @@ -40,6 +41,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.Executor; +import java.util.stream.Collectors; import static java.util.Collections.emptyList; import static org.elasticsearch.core.Strings.format; @@ -352,10 +354,17 @@ protected void startProbe(TransportAddress transportAddress) { } } + public Set getMastersOfPeers() { + synchronized (mutex) { + return peersByAddress.values().stream().flatMap(p -> p.lastKnownMasterNode.stream()).collect(Collectors.toSet()); + } + } + private class Peer { private final TransportAddress 
transportAddress; private final SetOnce probeConnectionResult = new SetOnce<>(); private volatile boolean peersRequestInFlight; + private Optional lastKnownMasterNode = Optional.empty(); Peer(TransportAddress transportAddress) { this.transportAddress = transportAddress; @@ -439,9 +448,20 @@ public void onResponse(ProbeConnectionResult connectResult) { @Override public void onFailure(Exception e) { if (verboseFailureLogging) { + + final String believedMasterBy; + synchronized (mutex) { + believedMasterBy = peersByAddress.values() + .stream() + .filter(p -> p.lastKnownMasterNode.map(DiscoveryNode::getAddress).equals(Optional.of(transportAddress))) + .findFirst() + .map(p -> " [current master according to " + p.getDiscoveryNode().descriptionWithoutAttributes() + "]") + .orElse(""); + } + if (logger.isDebugEnabled()) { // log message at level WARN, but since DEBUG logging is enabled we include the full stack trace - logger.warn(() -> format("%s discovery result", Peer.this), e); + logger.warn(() -> format("%s%s discovery result", Peer.this, believedMasterBy), e); } else { final StringBuilder messageBuilder = new StringBuilder(); Throwable cause = e; @@ -452,7 +472,14 @@ public void onFailure(Exception e) { final String message = messageBuilder.length() < 1024 ? 
messageBuilder.toString() : (messageBuilder.substring(0, 1023) + "..."); - logger.warn("{} discovery result{}", Peer.this, message); + logger.warn( + "{}{} discovery result{}; for summary, see logs from {}; for troubleshooting guidance, see {}", + Peer.this, + believedMasterBy, + message, + ClusterFormationFailureHelper.class.getCanonicalName(), + ReferenceDocs.DISCOVERY_TROUBLESHOOTING + ); } } else { logger.debug(() -> format("%s discovery result", Peer.this), e); @@ -504,6 +531,7 @@ public void handleResponse(PeersResponse response) { return; } + lastKnownMasterNode = response.getMasterNode(); response.getMasterNode().ifPresent(node -> startProbe(node.getAddress())); for (DiscoveryNode node : response.getKnownPeers()) { startProbe(node.getAddress()); @@ -524,7 +552,7 @@ public void handleException(TransportException exp) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return clusterCoordinationExecutor; } }; @@ -545,7 +573,13 @@ Releasable getConnectionReference() { @Override public String toString() { - return "address [" + transportAddress + "], node [" + getDiscoveryNode() + "], requesting [" + peersRequestInFlight + "]"; + return "address [" + + transportAddress + + "], node [" + + Optional.ofNullable(probeConnectionResult.get()) + .map(result -> result.getDiscoveryNode().descriptionWithoutAttributes()) + .orElse("unknown") + + (peersRequestInFlight ? 
" [request in flight]" : ""); } } } diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 68a1c54fd9869..39c33eb1684bb 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.env.NodeEnvironment; @@ -63,7 +62,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction private static final Logger logger = LogManager.getLogger(TransportNodesListGatewayStartedShards.class); public static final String ACTION_NAME = "internal:gateway/local/started_shards"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, Writeable.Reader.localOnly()); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private final Settings settings; private final NodeEnvironment nodeEnv; diff --git a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java index c1efa58a50c86..eeff060c174da 100644 --- a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; import 
org.elasticsearch.core.Nullable; import org.elasticsearch.health.stats.HealthApiStats; @@ -47,7 +46,7 @@ public class GetHealthAction extends ActionType { public static final String NAME = "cluster:monitor/health_api"; private GetHealthAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } public static class Response extends ActionResponse implements ChunkedToXContent { diff --git a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java index 288837fb3c808..a113bad942cc8 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java +++ b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java @@ -374,7 +374,7 @@ void writeMetrics(List healthIndicatorResults) { if (metric == null) { metric = LongGaugeMetric.create( this.meterRegistry, - String.format(Locale.ROOT, "es.health.%s.red", metricName), + String.format(Locale.ROOT, "es.health.%s.red.status", metricName), String.format(Locale.ROOT, "%s: Red", metricName), "{cluster}" ); diff --git a/server/src/main/java/org/elasticsearch/health/node/FetchHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/FetchHealthInfoCacheAction.java index 6433e3028dbfa..f342daa413fd6 100644 --- a/server/src/main/java/org/elasticsearch/health/node/FetchHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/FetchHealthInfoCacheAction.java @@ -89,7 +89,7 @@ public int hashCode() { public static final String NAME = "cluster:monitor/fetch/health/info"; private FetchHealthInfoCacheAction() { - super(NAME, FetchHealthInfoCacheAction.Response::new); + super(NAME); } public static class TransportAction extends TransportHealthNodeAction< diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index 0415e0c90ba8a..94cd518051199 100644 --- 
a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -432,24 +432,24 @@ DiskHealthInfo getHealth(HealthMetadata healthMetadata, ClusterState clusterStat return new DiskHealthInfo(HealthStatus.UNKNOWN, DiskHealthInfo.Cause.NODE_HAS_NO_DISK_STATS); } - ByteSizeValue totalBytes = ByteSizeValue.ofBytes(usage.getTotalBytes()); + ByteSizeValue totalBytes = ByteSizeValue.ofBytes(usage.totalBytes()); if (node.isDedicatedFrozenNode() || isDedicatedSearchNode(node)) { long frozenFloodStageThreshold = diskMetadata.getFreeBytesFrozenFloodStageWatermark(totalBytes).getBytes(); - if (usage.getFreeBytes() < frozenFloodStageThreshold) { + if (usage.freeBytes() < frozenFloodStageThreshold) { logger.debug("Flood stage disk watermark [{}] exceeded on {}", frozenFloodStageThreshold, usage); return new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD); } return new DiskHealthInfo(HealthStatus.GREEN); } long floodStageThreshold = diskMetadata.getFreeBytesFloodStageWatermark(totalBytes).getBytes(); - if (usage.getFreeBytes() < floodStageThreshold) { + if (usage.freeBytes() < floodStageThreshold) { logger.debug("Flood stage disk watermark [{}] exceeded on {}", floodStageThreshold, usage); return new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD); } long highThreshold = diskMetadata.getFreeBytesHighWatermark(totalBytes).getBytes(); - if (usage.getFreeBytes() < highThreshold) { + if (usage.freeBytes() < highThreshold) { if (node.canContainData()) { // for data nodes only report YELLOW if shards can't move away from the node if (DiskCheck.hasRelocatingShards(clusterState, node) == false) { diff --git a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java index 
d1961c597bc1e..9567331c678b5 100644 --- a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java @@ -140,7 +140,7 @@ public int hashCode() { public static final String NAME = "cluster:monitor/update/health/info"; private UpdateHealthInfoCacheAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class TransportAction extends TransportHealthNodeAction { diff --git a/server/src/main/java/org/elasticsearch/health/stats/HealthApiStatsAction.java b/server/src/main/java/org/elasticsearch/health/stats/HealthApiStatsAction.java index 7ee616ab2f3b9..9833a5368f058 100644 --- a/server/src/main/java/org/elasticsearch/health/stats/HealthApiStatsAction.java +++ b/server/src/main/java/org/elasticsearch/health/stats/HealthApiStatsAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.metrics.Counters; import org.elasticsearch.core.Nullable; import org.elasticsearch.transport.TransportRequest; @@ -36,7 +35,7 @@ public class HealthApiStatsAction extends ActionType { diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 177e4d471cf30..cfd72bf6ae4a5 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.transport.PortsRange; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import 
org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -51,7 +52,6 @@ import java.nio.channels.CancelledKeyException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -93,7 +93,7 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo private final Map httpChannels = new ConcurrentHashMap<>(); private final PlainActionFuture allClientsClosedListener = new PlainActionFuture<>(); private final RefCounted refCounted = AbstractRefCounted.of(() -> allClientsClosedListener.onResponse(null)); - private final Set httpServerChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final Set httpServerChannels = ConcurrentCollections.newConcurrentSet(); private final long shutdownGracePeriodMillis; private final HttpClientStatsTracker httpClientStatsTracker; diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index f4dbf8115da33..9719716c57ce4 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -95,6 +95,7 @@ public void sendResponse(RestResponse restResponse) { toClose.add(() -> CloseableChannel.closeChannel(httpChannel)); } toClose.add(() -> tracer.stopTrace(request)); + toClose.add(restResponse); boolean success = false; String opaque = null; @@ -113,7 +114,6 @@ public void sendResponse(RestResponse restResponse) { final HttpResponse httpResponse; if (isHeadRequest == false && restResponse.isChunked()) { ChunkedRestResponseBody chunkedContent = restResponse.chunkedContent(); - toClose.add(chunkedContent); if (httpLogger != null && httpLogger.isBodyTracerEnabled()) { final var loggerStream 
= httpLogger.openResponseBodyLoggingStream(request.getRequestId()); toClose.add(() -> { @@ -131,8 +131,6 @@ public void sendResponse(RestResponse restResponse) { final BytesReference content = restResponse.content(); if (content instanceof Releasable releasable) { toClose.add(releasable); - } else if (restResponse.isChunked()) { - toClose.add(restResponse.chunkedContent()); } toClose.add(this::releaseOutputBuffer); diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index f4edb8b1d4039..706a6ec8ccf02 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -143,6 +143,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder.value(id); } + /** + * Returns a string representing the Elasticsearch release version of this index version, + * if applicable for this deployment, otherwise the raw version number. 
+ */ + public String toReleaseVersion() { + return IndexVersions.VERSION_LOOKUP.apply(id); + } + @Override public String toString() { return Integer.toString(id); diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 4419abba73c1b..ce321b012ab95 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -9,6 +9,7 @@ package org.elasticsearch.index; import org.apache.lucene.util.Version; +import org.elasticsearch.ReleaseVersions; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.UpdateForV9; @@ -21,6 +22,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; +import java.util.function.IntFunction; @SuppressWarnings("deprecation") public class IndexVersions { @@ -94,8 +96,10 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion UPGRADE_LUCENE_9_9 = def(8_500_006, Version.LUCENE_9_9_0); public static final IndexVersion NORI_DUPLICATES = def(8_500_007, Version.LUCENE_9_9_0); public static final IndexVersion UPGRADE_LUCENE_9_9_1 = def(8_500_008, Version.LUCENE_9_9_1); - public static final IndexVersion ES_VERSION_8_13 = def(8_500_009, Version.LUCENE_9_9_1); + public static final IndexVersion ES_VERSION_8_12_1 = def(8_500_009, Version.LUCENE_9_9_1); + public static final IndexVersion UPGRADE_8_12_1_LUCENE_9_9_2 = def(8_500_010, Version.LUCENE_9_9_2); public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_00_0, Version.LUCENE_9_9_1); + public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_00_0, Version.LUCENE_9_9_2); /* * STOP! READ THIS FIRST! 
No, really, @@ -204,4 +208,9 @@ static NavigableMap getAllVersionIds(Class cls) { static Collection getAllVersions() { return VERSION_IDS.values(); } + + static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(IndexVersions.class); + + // no instance + private IndexVersions() {} } diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 9b9cf8ad35c04..3849095a94e6e 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1007,7 +1007,8 @@ protected void writerSegmentStats(SegmentsStats stats) { stats.addIndexWriterMemoryInBytes(0); } - /** How much heap is used that would be freed by a refresh. Note that this may throw {@link AlreadyClosedException}. */ + /** How much heap is used that would be freed by a refresh. This includes both the current memory being freed and any remaining + * memory usage that could be freed, e.g., by refreshing. Note that this may throw {@link AlreadyClosedException}. */ public abstract long getIndexBufferRAMBytesUsed(); final Segment[] getSegmentInfo(SegmentInfos lastCommittedSegmentInfos) { diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index e774d420de912..7a817500c4ca5 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -73,6 +73,7 @@ public final class EngineConfig { private final LongSupplier globalCheckpointSupplier; private final Supplier retentionLeasesSupplier; private final Comparator leafSorter; + private final boolean useCompoundFile; /** * A supplier of the outstanding retention leases. 
This is used during merged operations to determine which operations that have been @@ -108,6 +109,9 @@ public Supplier retentionLeasesSupplier() { } }, Property.IndexScope, Property.NodeScope, Property.ServerlessPublic); + // don't convert to Setting<> and register... we only set this in tests and register via a test plugin + public static final String USE_COMPOUND_FILE = "index.use_compound_file"; + /** * Legacy index setting, kept for 7.x BWC compatibility. This setting has no effect in 8.x. Do not use. * TODO: Remove in 9.0 @@ -202,6 +206,8 @@ public EngineConfig( this.relativeTimeInNanosSupplier = relativeTimeInNanosSupplier; this.indexCommitListener = indexCommitListener; this.promotableToPrimary = promotableToPrimary; + // always use compound on flush - reduces # of file-handles on refresh + this.useCompoundFile = indexSettings.getSettings().getAsBoolean(USE_COMPOUND_FILE, true); } /** @@ -423,4 +429,11 @@ public Engine.IndexCommitListener getIndexCommitListener() { public boolean isPromotableToPrimary() { return promotableToPrimary; } + + /** + * @return whether the Engine's index writer should pack newly written segments in a compound file. Default is true. 
+ */ + public boolean getUseCompoundFile() { + return useCompoundFile; + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 08fc9e55fd408..c3f324fc49e82 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -564,21 +564,19 @@ private void bootstrapAppendOnlyInfoFromWriter(IndexWriter writer) { @Override public void recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo, ActionListener listener) { - ActionListener.run(listener, l -> { - try (var ignored = acquireEnsureOpenRef()) { - if (pendingTranslogRecovery.get() == false) { - throw new IllegalStateException("Engine has already been recovered"); - } - recoverFromTranslogInternal(translogRecoveryRunner, recoverUpToSeqNo, l.delegateResponse((ll, e) -> { - try { - pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush - failEngine("failed to recover from translog", e); - } catch (Exception inner) { - e.addSuppressed(inner); - } - ll.onFailure(e); - })); + ActionListener.runWithResource(listener, this::acquireEnsureOpenRef, (l, ignoredRef) -> { + if (pendingTranslogRecovery.get() == false) { + throw new IllegalStateException("Engine has already been recovered"); } + recoverFromTranslogInternal(translogRecoveryRunner, recoverUpToSeqNo, l.delegateResponse((ll, e) -> { + try { + pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush + failEngine("failed to recover from translog", e); + } catch (Exception inner) { + e.addSuppressed(inner); + } + ll.onFailure(e); + })); }); } @@ -813,7 +811,7 @@ private GetResult getFromTranslog( index, mappingLookup, documentParser, - config().getAnalyzer(), + config(), translogInMemorySegmentsCount::incrementAndGet ); final 
Engine.Searcher searcher = new Engine.Searcher( @@ -2095,14 +2093,14 @@ protected final RefreshResult refresh(String source, SearcherScope scope, boolea @Override public void writeIndexingBuffer() throws IOException { - final long versionMapBytesUsed = versionMap.ramBytesUsedForRefresh(); + final long reclaimableVersionMapBytes = versionMap.reclaimableRefreshRamBytes(); // Only count bytes that are not already being written to disk. Note: this number may be negative at times if these two metrics get // updated concurrently. It's fine as it's only being used as a heuristic to decide on a full refresh vs. writing a single segment. // TODO: it might be more relevant to use the RAM usage of the largest DWPT as opposed to the overall RAM usage? Can we get this // exposed in Lucene? final long indexWriterBytesUsed = indexWriter.ramBytesUsed() - indexWriter.getFlushingBytes(); - if (versionMapBytesUsed >= indexWriterBytesUsed) { + if (reclaimableVersionMapBytes >= indexWriterBytesUsed) { // This method expects to reclaim memory quickly, so if the version map is using more memory than the IndexWriter buffer then we // do a refresh, which is the only way to reclaim memory from the version map. IndexWriter#flushNextBuffer has similar logic: if // pending deletes occupy more than half of RAMBufferSizeMB then deletes are applied too. @@ -2113,7 +2111,7 @@ public void writeIndexingBuffer() throws IOException { } } - private void reclaimVersionMapMemory() { + protected void reclaimVersionMapMemory() { // If we're already halfway through the flush thresholds, then we do a flush. This will save us from writing segments twice // independently in a short period of time, once to reclaim version map memory and then to reclaim the translog. 
For // memory-constrained deployments that need to refresh often to reclaim memory, this may require flushing 2x more often than @@ -2699,7 +2697,14 @@ private IndexWriterConfig getIndexWriterConfig() { iwc.setSimilarity(engineConfig.getSimilarity()); iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().getMbFrac()); iwc.setCodec(engineConfig.getCodec()); - iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh + boolean useCompoundFile = engineConfig.getUseCompoundFile(); + iwc.setUseCompoundFile(useCompoundFile); + if (useCompoundFile == false) { + logger.warn( + "[{}] is set to false, this should only be used in tests and can cause serious problems in production environments", + EngineConfig.USE_COMPOUND_FILE + ); + } if (config().getIndexSort() != null) { iwc.setIndexSort(config().getIndexSort()); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index 1cee2a90ec3f1..cb9d2fa9b1472 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -40,7 +40,7 @@ public static final class VersionLookup { /** Tracks bytes used by current map, i.e. what is freed on refresh. For deletes, which are also added to tombstones, * we only account for the CHM entry here, and account for BytesRef/VersionValue against the tombstones, since refresh would not - * clear this RAM. */ + * clear this from RAM. */ final AtomicLong ramBytesUsed = new AtomicLong(); private static final VersionLookup EMPTY = new VersionLookup(Collections.emptyMap()); @@ -70,7 +70,7 @@ public void merge(VersionLookup versionLookup) { existingEntriesSize += existingValue == null ? 
0 : mapEntryBytesUsed(entry.getKey(), existingValue); } map.putAll(versionLookup.map); - adjustRam(versionLookup.ramBytesUsed() - existingEntriesSize); + adjustRamUsage(versionLookup.ramBytesUsed() - existingEntriesSize); minDeleteTimestamp.accumulateAndGet(versionLookup.minDeleteTimestamp(), Math::min); } @@ -87,7 +87,7 @@ VersionValue put(BytesRef key, VersionValue value) { long ramAccounting = mapEntryBytesUsed(key, value); VersionValue previousValue = map.put(key, value); ramAccounting += previousValue == null ? 0 : -mapEntryBytesUsed(key, previousValue); - adjustRam(ramAccounting); + adjustRamUsage(ramAccounting); return previousValue; } @@ -110,7 +110,7 @@ void markAsUnsafe() { VersionValue remove(BytesRef uid) { VersionValue previousValue = map.remove(uid); if (previousValue != null) { - adjustRam(-mapEntryBytesUsed(uid, previousValue)); + adjustRamUsage(-mapEntryBytesUsed(uid, previousValue)); } return previousValue; } @@ -123,7 +123,7 @@ public long minDeleteTimestamp() { return minDeleteTimestamp.get(); } - void adjustRam(long value) { + void adjustRamUsage(long value) { if (value != 0) { long v = ramBytesUsed.addAndGet(value); assert v >= 0 : "bytes=" + v; @@ -273,7 +273,7 @@ long ramBytesUsed() { /** * Tracks bytes used by tombstones (deletes) */ - private final AtomicLong ramBytesUsedTombstones = new AtomicLong(); + private final AtomicLong ramBytesUsedForTombstones = new AtomicLong(); @Override public void beforeRefresh() throws IOException { @@ -391,16 +391,16 @@ void putDeleteUnderLock(BytesRef uid, DeleteVersionValue version) { } private void putTombstone(BytesRef uid, DeleteVersionValue version) { - long uidRAMBytesUsed = BASE_BYTES_PER_BYTESREF + uid.bytes.length; - // Also enroll the delete into tombstones, and account for its RAM too: + long uidRamBytesUsed = BASE_BYTES_PER_BYTESREF + uid.bytes.length; + // Also enroll the delete into tombstones, and account for its RAM usage too: final VersionValue prevTombstone = tombstones.put(uid, version); 
- long accountRam = (BASE_BYTES_PER_CHM_ENTRY + version.ramBytesUsed() + uidRAMBytesUsed); + long ramBytes = (BASE_BYTES_PER_CHM_ENTRY + version.ramBytesUsed() + uidRamBytesUsed); // Deduct tombstones bytes used for the version we just removed or replaced: if (prevTombstone != null) { - accountRam -= (BASE_BYTES_PER_CHM_ENTRY + prevTombstone.ramBytesUsed() + uidRAMBytesUsed); + ramBytes -= (BASE_BYTES_PER_CHM_ENTRY + prevTombstone.ramBytesUsed() + uidRamBytesUsed); } - if (accountRam != 0) { - long v = ramBytesUsedTombstones.addAndGet(accountRam); + if (ramBytes != 0) { + long v = ramBytesUsedForTombstones.addAndGet(ramBytes); assert v >= 0 : "bytes=" + v; } } @@ -410,11 +410,11 @@ private void putTombstone(BytesRef uid, DeleteVersionValue version) { */ void removeTombstoneUnderLock(BytesRef uid) { assert assertKeyedLockHeldByCurrentThread(uid); - long uidRAMBytesUsed = BASE_BYTES_PER_BYTESREF + uid.bytes.length; + long uidRamBytesUsed = BASE_BYTES_PER_BYTESREF + uid.bytes.length; final VersionValue prev = tombstones.remove(uid); if (prev != null) { assert prev.isDelete(); - long v = ramBytesUsedTombstones.addAndGet(-(BASE_BYTES_PER_CHM_ENTRY + prev.ramBytesUsed() + uidRAMBytesUsed)); + long v = ramBytesUsedForTombstones.addAndGet(-(BASE_BYTES_PER_CHM_ENTRY + prev.ramBytesUsed() + uidRamBytesUsed)); assert v >= 0 : "bytes=" + v; } } @@ -465,37 +465,53 @@ synchronized void clear() { maps = new Maps(); tombstones.clear(); // NOTE: we can't zero this here, because a refresh thread could be calling InternalEngine.pruneDeletedTombstones at the same time, - // and this will lead to an assert trip. Presumably it's fine if our ramBytesUsedTombstones is non-zero after clear since the + // and this will lead to an assert trip. 
Presumably it's fine if our ramBytesUsedForTombstones is non-zero after clear since the // index is being closed: - // ramBytesUsedTombstones.set(0); + // ramBytesUsedForTombstones.set(0); } @Override public long ramBytesUsed() { - return maps.ramBytesUsed() + ramBytesUsedTombstones.get() + ramBytesUsedForArchive(); + return maps.ramBytesUsed() + ramBytesUsedForTombstones.get() + ramBytesUsedForArchive(); } /** - * Returns how much RAM would be freed up by refreshing. This is the RAM usage of the current version map. It doesn't include tombstones - * since they don't get cleared on refresh, nor the old version map that is being reclaimed. + * Returns how much RAM is used by refresh. This is the RAM usage of the current and old version maps, and the RAM usage of the + * archive, if any. */ long ramBytesUsedForRefresh() { - return maps.current.ramBytesUsed.get(); + return maps.ramBytesUsed() + archive.getRamBytesUsed(); + } + + /** + * Returns how much RAM could be reclaimed from the version map. + *

    + * In stateful, this is the RAM usage of the current version map, and could be reclaimed by refreshing. It doesn't include tombstones + * since they don't get cleared on refresh, nor the old version map that is being reclaimed. + *

    + * In stateless, this is the RAM usage of current and old version map plus the RAM usage of the parts of the archive that require + * a new unpromotable refresh. To reclaim all three components we need to refresh AND flush. + */ + long reclaimableRefreshRamBytes() { + return archive == LiveVersionMapArchive.NOOP_ARCHIVE + ? maps.current.ramBytesUsed.get() + : maps.ramBytesUsed() + archive.getReclaimableRamBytes(); } /** * Returns how much RAM would be freed up by cleaning out the LiveVersionMapArchive. */ long ramBytesUsedForArchive() { - return archive.getMemoryBytesUsed(); + return archive.getRamBytesUsed(); } /** - * Returns how much RAM is current being freed up by refreshing. This is the RAM usage of the previous version map that needs to stay - * around until operations are safely recorded in the Lucene index. + * Returns how much RAM is current being freed up by refreshing. In Stateful, this is the RAM usage of the previous version map + * that needs to stay around until operations are safely recorded in the Lucene index. In Stateless, this is the RAM usage of a + * fraction of the Archive entries that are kept around until an ongoing unpromotable refresh is finished. */ long getRefreshingBytes() { - return maps.old.ramBytesUsed.get(); + return archive == LiveVersionMapArchive.NOOP_ARCHIVE ? maps.old.ramBytesUsed.get() : archive.getRefreshingRamBytes(); } /** diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java index 9ccbf6ac16fed..c112ae5f70307 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java @@ -40,13 +40,26 @@ default boolean isUnsafe() { } /** - * Returns how much memory is currently being used by the archive and would be freed up after - * unpromotables are refreshed. 
+ * Returns the total memory usage if the Archive. */ - default long getMemoryBytesUsed() { + default long getRamBytesUsed() { return 0L; } + /** + * Returns how much memory could be freed up by creating a new commit and issuing a new unpromotable refresh. + */ + default long getReclaimableRamBytes() { + return 0; + } + + /** + * Returns how much memory will be freed once the current ongoing unpromotable refresh is finished. + */ + default long getRefreshingRamBytes() { + return 0; + } + LiveVersionMapArchive NOOP_ARCHIVE = new LiveVersionMapArchive() { @Override public void afterRefresh(LiveVersionMap.VersionLookup old) {} diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index 7d5410cf488d7..b2326b749f970 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -525,13 +525,11 @@ public void recoverFromTranslog( final long recoverUpToSeqNo, ActionListener listener ) { - ActionListener.run(listener, l -> { - try (var ignored = acquireEnsureOpenRef()) { - try { - translogRecoveryRunner.run(this, Translog.Snapshot.EMPTY); - } catch (final Exception e) { - throw new EngineException(shardId, "failed to recover from empty translog snapshot", e); - } + ActionListener.runWithResource(listener, this::acquireEnsureOpenRef, (l, ignoredRef) -> { + try { + translogRecoveryRunner.run(this, Translog.Snapshot.EMPTY); + } catch (final Exception e) { + throw new EngineException(shardId, "failed to recover from empty translog snapshot", e); } l.onResponse(null); }); diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index a09810750c66e..6e1c01c886145 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java 
+++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.engine; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.BaseTermsEnum; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; @@ -83,10 +82,10 @@ final class TranslogDirectoryReader extends DirectoryReader { Translog.Index operation, MappingLookup mappingLookup, DocumentParser documentParser, - Analyzer analyzer, + EngineConfig engineConfig, Runnable onSegmentCreated ) throws IOException { - this(new TranslogLeafReader(shardId, operation, mappingLookup, documentParser, analyzer, onSegmentCreated)); + this(new TranslogLeafReader(shardId, operation, mappingLookup, documentParser, engineConfig, onSegmentCreated)); } private TranslogDirectoryReader(TranslogLeafReader leafReader) throws IOException { @@ -205,7 +204,7 @@ private static class TranslogLeafReader extends LeafReader { private final Translog.Index operation; private final MappingLookup mappingLookup; private final DocumentParser documentParser; - private final Analyzer analyzer; + private final EngineConfig engineConfig; private final Directory directory; private final Runnable onSegmentCreated; @@ -217,14 +216,14 @@ private static class TranslogLeafReader extends LeafReader { Translog.Index operation, MappingLookup mappingLookup, DocumentParser documentParser, - Analyzer analyzer, + EngineConfig engineConfig, Runnable onSegmentCreated ) { this.shardId = shardId; this.operation = operation; this.mappingLookup = mappingLookup; this.documentParser = documentParser; - this.analyzer = analyzer; + this.engineConfig = engineConfig; this.onSegmentCreated = onSegmentCreated; this.directory = new ByteBuffersDirectory(); this.uid = Uid.encodeId(operation.id()); @@ -264,7 +263,10 @@ private LeafReader createInMemoryLeafReader() { parsedDocs.updateSeqID(operation.seqNo(), operation.primaryTerm()); 
parsedDocs.version().setLongValue(operation.version()); - final IndexWriterConfig writeConfig = new IndexWriterConfig(analyzer).setOpenMode(IndexWriterConfig.OpenMode.CREATE); + // To guarantee indexability, we configure the analyzer and codec using the main engine configuration + final IndexWriterConfig writeConfig = new IndexWriterConfig(engineConfig.getAnalyzer()).setOpenMode( + IndexWriterConfig.OpenMode.CREATE + ).setCodec(engineConfig.getCodec()); try (IndexWriter writer = new IndexWriter(directory, writeConfig)) { writer.addDocument(parsedDocs.rootDoc()); final DirectoryReader reader = open(writer); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index 187d59a88e2fd..7cadec68f3e61 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -128,7 +128,7 @@ protected Object parseSourceValue(Object value) { }; } - public ValueFetcher valueFetcher(Set sourcePaths, Object nullValue, String format) { + public ValueFetcher valueFetcher(Set sourcePaths, T nullValue, String format) { Function, List> formatter = getFormatter(format != null ? 
format : GeometryFormatterFactory.GEOJSON); return new ArraySourceValueFetcher(sourcePaths, nullValueAsSource(nullValue)) { @Override @@ -140,7 +140,19 @@ protected Object parseSourceValue(Object value) { }; } - protected abstract Object nullValueAsSource(Object nullValue); + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + // Currently we can only load from source in ESQL + return blockLoaderFromSource(blContext); + } + + protected BlockLoader blockLoaderFromSource(BlockLoaderContext blContext) { + ValueFetcher fetcher = valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKB); + // TODO consider optimization using BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) + return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); + } + + protected abstract Object nullValueAsSource(T nullValue); } private final Explicit ignoreMalformed; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java index 031b67c263505..9136a0dfbf550 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.TriFunction; -import org.elasticsearch.common.geo.GeometryFormatterFactory; import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedFunction; @@ -23,6 +22,8 @@ import java.util.function.Function; import java.util.function.Supplier; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; + /** Base class for spatial fields that only support 
indexing points */ public abstract class AbstractPointGeometryFieldMapper extends AbstractGeometryFieldMapper { @@ -174,20 +175,16 @@ protected AbstractPointFieldType( } @Override - protected Object nullValueAsSource(Object nullValue) { - if (nullValue == null) { - return null; - } - SpatialPoint point = (SpatialPoint) nullValue; - return point.toWKT(); + protected Object nullValueAsSource(T nullValue) { + return nullValue == null ? null : nullValue.toWKT(); } @Override public BlockLoader blockLoader(BlockLoaderContext blContext) { - // Currently we can only load from source in ESQL - ValueFetcher fetcher = valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKB); - // TODO consider optimization using BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) - return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); + if (blContext.fieldExtractPreference() == DOC_VALUES && hasDocValues()) { + return new BlockDocValuesReader.LongsBlockLoader(name()); + } + return blockLoaderFromSource(blContext); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java index c18c4db955a43..56f1faeb38a5b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java @@ -64,14 +64,8 @@ public Orientation orientation() { } @Override - public BlockLoader blockLoader(BlockLoaderContext blContext) { - // TODO: Support shapes in ESQL - return null; - } - - @Override - protected Object nullValueAsSource(Object nullValue) { - // TODO: When we support shapes in ESQL; we need to return a shape in source format here + protected Object nullValueAsSource(T nullValue) { + // we don't support null value fors shapes return nullValue; } } diff 
--git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 4dd4521b565d8..54223e1e692f3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -247,7 +247,7 @@ private static DocumentParsingException wrapInDocumentParsingException(DocumentP } static Mapping createDynamicUpdate(DocumentParserContext context) { - if (context.getDynamicMappers().isEmpty() && context.getDynamicRuntimeFields().isEmpty()) { + if (context.hasDynamicMappersOrRuntimeFields() == false) { return null; } RootObjectMapper.Builder rootBuilder = context.updateRoot(); @@ -822,7 +822,7 @@ private static class NoOpObjectMapper extends ObjectMapper { } @Override - public ObjectMapper merge(Mapper mergeWith, MapperBuilderContext mapperBuilderContext) { + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) { return this; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 700f0e492af73..b9dfc83d17683 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -339,6 +339,20 @@ public final void addDynamicMapper(Mapper mapper) { dynamicMappers.computeIfAbsent(mapper.name(), k -> new ArrayList<>()).add(mapper); } + /** + * @return true if either {@link #getDynamicMappers} or {@link #getDynamicRuntimeFields()} will return a non-empty result + */ + public final boolean hasDynamicMappersOrRuntimeFields() { + return hasDynamicMappers() || dynamicRuntimeFields.isEmpty() == false; + } + + /** + * @return true if either {@link #getDynamicMappers} will return a non-empty mapper list + */ + public final boolean 
hasDynamicMappers() { + return dynamicMappers.isEmpty() == false; + } + /** * Get dynamic mappers created as a result of parsing an incoming document. Responsible for exposing all the newly created * fields that need to be merged into the existing mappings. Used to create the required mapping update at the end of document parsing. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java index 0654f48d0382e..c24ff9bb9c277 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java @@ -55,7 +55,7 @@ public String path() { } @Override - public Mapper merge(Mapper mergeWith, MapperBuilderContext mapperBuilderContext) { + public Mapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) { if ((mergeWith instanceof FieldAliasMapper) == false) { throw new IllegalArgumentException( "Cannot merge a field alias mapping [" + name() + "] with a mapping that is not for a field alias." 
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 974d935a35e50..9ed23f61bf0ea 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -372,7 +372,7 @@ private static void checkNestedScopeCompatibility(String source, String target) public abstract Builder getMergeBuilder(); @Override - public final FieldMapper merge(Mapper mergeWith, MapperBuilderContext mapperBuilderContext) { + public final FieldMapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) { if (mergeWith == this) { return this; } @@ -394,9 +394,9 @@ public final FieldMapper merge(Mapper mergeWith, MapperBuilderContext mapperBuil return (FieldMapper) mergeWith; } Conflicts conflicts = new Conflicts(name()); - builder.merge((FieldMapper) mergeWith, conflicts, mapperBuilderContext); + builder.merge((FieldMapper) mergeWith, conflicts, mapperMergeContext); conflicts.check(); - return builder.build(mapperBuilderContext); + return builder.build(mapperMergeContext.getMapperBuilderContext()); } protected void checkIncomingMergeType(FieldMapper mergeWith) { @@ -449,19 +449,19 @@ public Builder add(FieldMapper.Builder builder) { return this; } - public Builder add(FieldMapper mapper) { + private void add(FieldMapper mapper) { mapperBuilders.put(mapper.simpleName(), context -> mapper); - return this; } - public Builder update(FieldMapper toMerge, MapperBuilderContext context) { + private void update(FieldMapper toMerge, MapperMergeContext context) { if (mapperBuilders.containsKey(toMerge.simpleName()) == false) { - add(toMerge); + if (context.decrementFieldBudgetIfPossible(toMerge.mapperSize())) { + add(toMerge); + } } else { - FieldMapper existing = mapperBuilders.get(toMerge.simpleName()).apply(context); + FieldMapper existing = 
mapperBuilders.get(toMerge.simpleName()).apply(context.getMapperBuilderContext()); add(existing.merge(toMerge, context)); } - return this; } public boolean hasMultiFields() { @@ -1220,11 +1220,11 @@ public Builder init(FieldMapper initializer) { return this; } - protected void merge(FieldMapper in, Conflicts conflicts, MapperBuilderContext mapperBuilderContext) { + protected void merge(FieldMapper in, Conflicts conflicts, MapperMergeContext mapperMergeContext) { for (Parameter param : getParameters()) { param.merge(in, conflicts); } - MapperBuilderContext childContext = mapperBuilderContext.createChildContext(in.simpleName()); + MapperMergeContext childContext = mapperMergeContext.createChildContext(in.simpleName()); for (FieldMapper newSubField : in.multiFields.mappers) { multiFieldsBuilder.update(newSubField, childContext); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 6f561bdb2dc4c..ea6bc2b73a208 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -640,6 +640,17 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { return null; } + public enum FieldExtractPreference { + /** + * Load the field from doc-values into a BlockLoader supporting doc-values. + */ + DOC_VALUES, + /** + * No preference. Leave the choice of where to load the field from up to the FieldType. + */ + NONE + } + /** * Arguments for {@link #blockLoader}. */ @@ -649,6 +660,13 @@ public interface BlockLoaderContext { */ String indexName(); + /** + * How the field should be extracted into the BlockLoader. The default is {@link FieldExtractPreference#NONE}, which means + * that the field type can choose where to load the field from. However, in some cases, the caller may have a preference. 
+ * For example, when loading a spatial field for usage in STATS, it is preferable to load from doc-values. + */ + FieldExtractPreference fieldExtractPreference(); + /** * {@link SearchLookup} used for building scripts. */ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java index e977b0aac014a..ca15248c037bc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -73,9 +73,11 @@ public final String simpleName() { */ public abstract String typeName(); - /** Return the merge of {@code mergeWith} into this. - * Both {@code this} and {@code mergeWith} will be left unmodified. */ - public abstract Mapper merge(Mapper mergeWith, MapperBuilderContext mapperBuilderContext); + /** + * Return the merge of {@code mergeWith} into this. + * Both {@code this} and {@code mergeWith} will be left unmodified. + */ + public abstract Mapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext); /** * Validate any cross-field references made by this mapper @@ -133,4 +135,18 @@ public static FieldType freezeAndDeduplicateFieldType(FieldType fieldType) { } return fieldTypeDeduplicator.computeIfAbsent(fieldType, Function.identity()); } + + /** + * Returns the size this mapper counts against the {@linkplain MapperService#INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING field limit}. + *

    + * Needs to be in sync with {@link MappingLookup#getTotalFieldsCount()}. + */ + public int mapperSize() { + int size = 1; + for (Mapper mapper : this) { + size += mapper.mapperSize(); + } + return size; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java new file mode 100644 index 0000000000000..79adaf5966c5b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +/** + * Holds context used when merging mappings. + * As the merge process also involves building merged {@link Mapper.Builder}s, + * this also contains a {@link MapperBuilderContext}. 
+ */ +public final class MapperMergeContext { + + private final MapperBuilderContext mapperBuilderContext; + private final NewFieldsBudget newFieldsBudget; + + private MapperMergeContext(MapperBuilderContext mapperBuilderContext, NewFieldsBudget newFieldsBudget) { + this.mapperBuilderContext = mapperBuilderContext; + this.newFieldsBudget = newFieldsBudget; + } + + /** + * The root context, to be used when merging a tree of mappers + */ + public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) { + return new MapperMergeContext(MapperBuilderContext.root(isSourceSynthetic, isDataStream), NewFieldsBudget.of(newFieldsBudget)); + } + + /** + * Creates a new {@link MapperMergeContext} from a {@link MapperBuilderContext} + * @param mapperBuilderContext the {@link MapperBuilderContext} for this {@link MapperMergeContext} + * @param newFieldsBudget limits how many fields can be added during the merge process + * @return a new {@link MapperMergeContext}, wrapping the provided {@link MapperBuilderContext} + */ + public static MapperMergeContext from(MapperBuilderContext mapperBuilderContext, long newFieldsBudget) { + return new MapperMergeContext(mapperBuilderContext, NewFieldsBudget.of(newFieldsBudget)); + } + + /** + * Creates a new {@link MapperMergeContext} with a child {@link MapperBuilderContext}. + * The child {@link MapperMergeContext} context will share the same field limit. + * @param name the name of the child context + * @return a new {@link MapperMergeContext} with this context as its parent + */ + MapperMergeContext createChildContext(String name) { + return createChildContext(mapperBuilderContext.createChildContext(name)); + } + + /** + * Creates a new {@link MapperMergeContext} with a given child {@link MapperBuilderContext} + * The child {@link MapperMergeContext} context will share the same field limit. 
+ * @param childContext the child {@link MapperBuilderContext} + * @return a new {@link MapperMergeContext}, wrapping the provided {@link MapperBuilderContext} + */ + MapperMergeContext createChildContext(MapperBuilderContext childContext) { + return new MapperMergeContext(childContext, newFieldsBudget); + } + + MapperBuilderContext getMapperBuilderContext() { + return mapperBuilderContext; + } + + boolean decrementFieldBudgetIfPossible(int fieldSize) { + return newFieldsBudget.decrementIfPossible(fieldSize); + } + + /** + * Keeps track of how many new fields can be added during mapper merge. + * The field budget is shared across instances of {@link MapperMergeContext} that are created via + * {@link MapperMergeContext#createChildContext}. + * This ensures that fields that are consumed by one child object mapper also decrement the budget for another child object. + * Not thread safe.The same instance may not be modified by multiple threads. + */ + private interface NewFieldsBudget { + + static NewFieldsBudget of(long fieldsBudget) { + if (fieldsBudget == Long.MAX_VALUE) { + return Unlimited.INSTANCE; + } + return new Limited(fieldsBudget); + } + + boolean decrementIfPossible(long fieldSize); + + final class Unlimited implements NewFieldsBudget { + + private static final Unlimited INSTANCE = new Unlimited(); + + private Unlimited() {} + + @Override + public boolean decrementIfPossible(long fieldSize) { + return true; + } + + } + + final class Limited implements NewFieldsBudget { + + private long fieldsBudget; + + Limited(long fieldsBudget) { + this.fieldsBudget = fieldsBudget; + } + + @Override + public boolean decrementIfPossible(long fieldSize) { + if (fieldsBudget >= fieldSize) { + fieldsBudget -= fieldSize; + return true; + } + return false; + } + + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b714eabbd2636..20bd7cf4a87e6 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -56,9 +56,23 @@ public class MapperService extends AbstractIndexComponent implements Closeable { */ public enum MergeReason { /** - * Pre-flight check before sending a mapping update to the master + * Pre-flight check before sending a dynamic mapping update to the master */ - MAPPING_UPDATE_PREFLIGHT, + MAPPING_AUTO_UPDATE_PREFLIGHT { + @Override + public boolean isAutoUpdate() { + return true; + } + }, + /** + * Dynamic mapping updates + */ + MAPPING_AUTO_UPDATE { + @Override + public boolean isAutoUpdate() { + return true; + } + }, /** * Create or update a mapping. */ @@ -72,7 +86,11 @@ public enum MergeReason { * if a shard was moved to a different node or for administrative * purposes. */ - MAPPING_RECOVERY + MAPPING_RECOVERY; + + public boolean isAutoUpdate() { + return false; + } } public static final String SINGLE_MAPPING_NAME = "_doc"; @@ -364,7 +382,7 @@ boolean assertNoUpdateRequired(final IndexMetadata newIndexMetadata) { } public void merge(IndexMetadata indexMetadata, MergeReason reason) { - assert reason != MergeReason.MAPPING_UPDATE_PREFLIGHT; + assert reason != MergeReason.MAPPING_AUTO_UPDATE_PREFLIGHT; MappingMetadata mappingMetadata = indexMetadata.mapping(); if (mappingMetadata != null) { merge(mappingMetadata.type(), mappingMetadata.source(), reason); @@ -521,7 +539,7 @@ private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map // TODO: In many cases the source here is equal to mappingSource so we need not serialize again. 
// We should identify these cases reliably and save expensive serialization here DocumentMapper newMapper = newDocumentMapper(mapping, reason, mapping.toCompressedXContent()); - if (reason == MergeReason.MAPPING_UPDATE_PREFLIGHT) { + if (reason == MergeReason.MAPPING_AUTO_UPDATE_PREFLIGHT) { return newMapper; } this.mapper = newMapper; @@ -559,11 +577,15 @@ public Mapping parseMapping(String mappingType, Map mappingSourc } public static Mapping mergeMappings(DocumentMapper currentMapper, Mapping incomingMapping, MergeReason reason) { + return mergeMappings(currentMapper, incomingMapping, reason, Long.MAX_VALUE); + } + + static Mapping mergeMappings(DocumentMapper currentMapper, Mapping incomingMapping, MergeReason reason, long newFieldsBudget) { Mapping newMapping; if (currentMapper == null) { - newMapping = incomingMapping; + newMapping = incomingMapping.withFieldsBudget(newFieldsBudget); } else { - newMapping = currentMapper.mapping().merge(incomingMapping, reason); + newMapping = currentMapper.mapping().merge(incomingMapping, reason, newFieldsBudget); } return newMapping; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index fb07ddbc56d83..903e4e5da5b29 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -133,10 +133,12 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { * * @param mergeWith the new mapping to merge into this one. * @param reason the reason this merge was initiated. + * @param newFieldsBudget how many new fields can be added during the merge process * @return the resulting merged mapping. 
*/ - Mapping merge(Mapping mergeWith, MergeReason reason) { - RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, MapperBuilderContext.root(isSourceSynthetic(), false)); + Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) { + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, newFieldsBudget); + RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, mergeContext); // When merging metadata fields as part of applying an index template, new field definitions // completely overwrite existing ones instead of being merged. This behavior matches how we @@ -148,7 +150,7 @@ Mapping merge(Mapping mergeWith, MergeReason reason) { if (mergeInto == null || reason == MergeReason.INDEX_TEMPLATE) { merged = metaMergeWith; } else { - merged = (MetadataFieldMapper) mergeInto.merge(metaMergeWith, MapperBuilderContext.root(isSourceSynthetic(), false)); + merged = (MetadataFieldMapper) mergeInto.merge(metaMergeWith, mergeContext); } mergedMetadataMappers.put(merged.getClass(), merged); } @@ -169,6 +171,18 @@ Mapping merge(Mapping mergeWith, MergeReason reason) { return new Mapping(mergedRoot, mergedMetadataMappers.values().toArray(new MetadataFieldMapper[0]), mergedMeta); } + /** + * Returns a copy of this mapper that ensures that the number of fields isn't greater than the provided fields budget. 
+ * @param fieldsBudget the maximum number of fields this mapping may have + */ + public Mapping withFieldsBudget(long fieldsBudget) { + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, fieldsBudget); + // get a copy of the root mapper, without any fields + RootObjectMapper shallowRoot = root.withoutMappers(); + // calling merge on the shallow root to ensure we're only adding as many fields as allowed by the fields budget + return new Mapping(shallowRoot.merge(root, MergeReason.MAPPING_RECOVERY, mergeContext), metadataMappers, meta); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { root.toXContent(builder, params, (b, params1) -> { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index 4880ce5edc204..0172c22c0b176 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -271,7 +271,7 @@ private void checkFieldLimit(long limit) { } void checkFieldLimit(long limit, int additionalFieldsToAdd) { - if (getTotalFieldsCount() + additionalFieldsToAdd - mapping.getSortedMetadataMappers().length > limit) { + if (exceedsLimit(limit, additionalFieldsToAdd)) { throw new IllegalArgumentException( "Limit of total fields [" + limit @@ -281,6 +281,10 @@ void checkFieldLimit(long limit, int additionalFieldsToAdd) { } } + boolean exceedsLimit(long limit, int additionalFieldsToAdd) { + return getTotalFieldsCount() + additionalFieldsToAdd - mapping.getSortedMetadataMappers().length > limit; + } + private void checkDimensionFieldLimit(long limit) { long dimensionFieldCount = fieldMappers.values() .stream() diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index 
257b2270176bc..268d028be91a6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -186,6 +186,21 @@ public ObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { return builder; } + @Override + NestedObjectMapper withoutMappers() { + return new NestedObjectMapper( + simpleName(), + fullPath(), + Map.of(), + enabled, + dynamic, + includeInParent, + includeInRoot, + nestedTypePath, + nestedTypeFilter + ); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(simpleName()); @@ -207,12 +222,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, MapperBuilderContext parentBuilderContext) { + public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { if ((mergeWith instanceof NestedObjectMapper) == false) { MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith; - var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + return merge(mergeWithObject, reason, parentMergeContext); + } + + ObjectMapper merge(NestedObjectMapper mergeWithObject, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { + var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); Explicit incInParent = this.includeInParent; Explicit incInRoot = this.includeInRoot; if (reason == MapperService.MergeReason.INDEX_TEMPLATE) { @@ -230,6 +249,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, Ma throw new MapperException("the [include_in_root] parameter can't be updated on a nested object mapping"); } } + MapperBuilderContext 
parentBuilderContext = parentMergeContext.getMapperBuilderContext(); if (parentBuilderContext instanceof NestedMapperBuilderContext nc) { if (nc.parentIncludedInRoot && incInParent.value()) { incInRoot = Explicit.IMPLICIT_FALSE; @@ -253,12 +273,15 @@ public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, Ma } @Override - protected MapperBuilderContext createChildContext(MapperBuilderContext mapperBuilderContext, String name) { + protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeContext, String name) { + MapperBuilderContext mapperBuilderContext = mapperMergeContext.getMapperBuilderContext(); boolean parentIncludedInRoot = this.includeInRoot.value(); if (mapperBuilderContext instanceof NestedMapperBuilderContext == false) { parentIncludedInRoot |= this.includeInParent.value(); } - return new NestedMapperBuilderContext(mapperBuilderContext.buildFullName(name), parentIncludedInRoot); + return mapperMergeContext.createChildContext( + new NestedMapperBuilderContext(mapperBuilderContext.buildFullName(name), parentIncludedInRoot) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 068f5882f5eb3..6ced2b49bb84a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -162,7 +162,7 @@ protected final Map buildMappers(MapperBuilderContext mapperBuil // This can also happen due to multiple index templates being merged into a single mappings definition using // XContentHelper#mergeDefaults, again in case some index templates contained mappings for the same field using a // mix of object notation and dot notation. 
- mapper = existing.merge(mapper, mapperBuilderContext); + mapper = existing.merge(mapper, MapperMergeContext.from(mapperBuilderContext, Long.MAX_VALUE)); } mappers.put(mapper.simpleName(), mapper); } @@ -403,6 +403,14 @@ public ObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { return builder; } + /** + * Returns a copy of this object mapper that doesn't have any fields and runtime fields. + * This is typically used in the context of a mapper merge when there's not enough budget to add the entire object. + */ + ObjectMapper withoutMappers() { + return new ObjectMapper(simpleName(), fullPath, enabled, subobjects, dynamic, Map.of()); + } + @Override public String name() { return this.fullPath; @@ -443,8 +451,8 @@ public final boolean subobjects() { } @Override - public ObjectMapper merge(Mapper mergeWith, MapperBuilderContext mapperBuilderContext) { - return merge(mergeWith, MergeReason.MAPPING_UPDATE, mapperBuilderContext); + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) { + return merge(mergeWith, MergeReason.MAPPING_UPDATE, mapperMergeContext); } @Override @@ -454,12 +462,23 @@ public void validate(MappingLookup mappers) { } } - protected MapperBuilderContext createChildContext(MapperBuilderContext mapperBuilderContext, String name) { - return mapperBuilderContext.createChildContext(name); + protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeContext, String name) { + return mapperMergeContext.createChildContext(name); + } + + public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + if (mergeWith instanceof ObjectMapper == false) { + MapperErrors.throwObjectMappingConflictError(mergeWith.name()); + } + if (this instanceof NestedObjectMapper == false && mergeWith instanceof NestedObjectMapper) { + // TODO stop NestedObjectMapper extending ObjectMapper? 
+ MapperErrors.throwNestedMappingConflictError(mergeWith.name()); + } + return merge((ObjectMapper) mergeWith, reason, parentMergeContext); } - public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperBuilderContext parentBuilderContext) { - var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + ObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + var mergeResult = MergeResult.build(this, mergeWith, reason, parentMergeContext); return new ObjectMapper( simpleName(), fullPath, @@ -476,21 +495,12 @@ protected record MergeResult( ObjectMapper.Dynamic dynamic, Map mappers ) { - - public static MergeResult build( + static MergeResult build( ObjectMapper existing, - Mapper mergeWith, + ObjectMapper mergeWithObject, MergeReason reason, - MapperBuilderContext parentBuilderContext + MapperMergeContext parentMergeContext ) { - if ((mergeWith instanceof ObjectMapper) == false) { - MapperErrors.throwObjectMappingConflictError(mergeWith.name()); - } - if (existing instanceof NestedObjectMapper == false && mergeWith instanceof NestedObjectMapper) { - // TODO stop NestedObjectMapper extending ObjectMapper? 
- MapperErrors.throwNestedMappingConflictError(mergeWith.name()); - } - ObjectMapper mergeWithObject = (ObjectMapper) mergeWith; final Explicit enabled; if (mergeWithObject.enabled.explicit()) { if (reason == MergeReason.INDEX_TEMPLATE) { @@ -517,8 +527,8 @@ public static MergeResult build( } else { subObjects = existing.subobjects; } - MapperBuilderContext objectBuilderContext = existing.createChildContext(parentBuilderContext, existing.simpleName()); - Map mergedMappers = buildMergedMappers(existing, mergeWith, reason, objectBuilderContext); + MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.simpleName()); + Map mergedMappers = buildMergedMappers(existing, mergeWithObject, reason, objectMergeContext); return new MergeResult( enabled, subObjects, @@ -529,19 +539,27 @@ public static MergeResult build( private static Map buildMergedMappers( ObjectMapper existing, - Mapper mergeWith, + ObjectMapper mergeWithObject, MergeReason reason, - MapperBuilderContext objectBuilderContext + MapperMergeContext objectMergeContext ) { - Map mergedMappers = null; - for (Mapper mergeWithMapper : mergeWith) { - Mapper mergeIntoMapper = (mergedMappers == null ? 
existing.mappers : mergedMappers).get(mergeWithMapper.simpleName()); - - Mapper merged; + Iterator iterator = mergeWithObject.iterator(); + if (iterator.hasNext() == false) { + return Map.copyOf(existing.mappers); + } + Map mergedMappers = new HashMap<>(existing.mappers); + while (iterator.hasNext()) { + Mapper mergeWithMapper = iterator.next(); + Mapper mergeIntoMapper = mergedMappers.get(mergeWithMapper.simpleName()); + Mapper merged = null; if (mergeIntoMapper == null) { - merged = mergeWithMapper; + if (objectMergeContext.decrementFieldBudgetIfPossible(mergeWithMapper.mapperSize())) { + merged = mergeWithMapper; + } else if (mergeWithMapper instanceof ObjectMapper om) { + merged = truncateObjectMapper(reason, objectMergeContext, om); + } } else if (mergeIntoMapper instanceof ObjectMapper objectMapper) { - merged = objectMapper.merge(mergeWithMapper, reason, objectBuilderContext); + merged = objectMapper.merge(mergeWithMapper, reason, objectMergeContext); } else { assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper; if (mergeWithMapper instanceof NestedObjectMapper) { @@ -555,20 +573,25 @@ private static Map buildMergedMappers( if (reason == MergeReason.INDEX_TEMPLATE) { merged = mergeWithMapper; } else { - merged = mergeIntoMapper.merge(mergeWithMapper, objectBuilderContext); + merged = mergeIntoMapper.merge(mergeWithMapper, objectMergeContext); } } - if (mergedMappers == null) { - mergedMappers = new HashMap<>(existing.mappers); + if (merged != null) { + mergedMappers.put(merged.simpleName(), merged); } - mergedMappers.put(merged.simpleName(), merged); } - if (mergedMappers != null) { - mergedMappers = Map.copyOf(mergedMappers); - } else { - mergedMappers = Map.copyOf(existing.mappers); + return Map.copyOf(mergedMappers); + } + + private static ObjectMapper truncateObjectMapper(MergeReason reason, MapperMergeContext context, ObjectMapper objectMapper) { + // there's not enough capacity for the whole object mapper, + // 
so we're just trying to add the shallow object, without it's sub-fields + ObjectMapper shallowObjectMapper = objectMapper.withoutMappers(); + if (context.decrementFieldBudgetIfPossible(shallowObjectMapper.mapperSize())) { + // now trying to add the sub-fields one by one via a merge, until we hit the limit + return shallowObjectMapper.merge(objectMapper, reason, context); } - return mergedMappers; + return null; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java b/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java index 05f05dd5be941..bf540eae5ed49 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java @@ -160,7 +160,7 @@ public void addDynamicMappingsUpdate(Mapping update) { if (dynamicMappingsUpdate == null) { dynamicMappingsUpdate = update; } else { - dynamicMappingsUpdate = dynamicMappingsUpdate.merge(update, MergeReason.MAPPING_UPDATE); + dynamicMappingsUpdate = dynamicMappingsUpdate.merge(update, MergeReason.MAPPING_AUTO_UPDATE, Long.MAX_VALUE); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java index 4ad0873b66d50..98f8f21be704a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java @@ -63,10 +63,10 @@ public FieldMapper.Builder init(FieldMapper initializer) { } @Override - protected void merge(FieldMapper in, Conflicts conflicts, MapperBuilderContext mapperBuilderContext) { + protected void merge(FieldMapper in, Conflicts conflicts, MapperMergeContext mapperMergeContext) { assert in instanceof PlaceHolderFieldMapper; unknownParams.putAll(((PlaceHolderFieldMapper) in).unknownParams); - super.merge(in, conflicts, mapperBuilderContext); + 
super.merge(in, conflicts, mapperMergeContext); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 65fce1b69b8cc..5d719ae4f5da7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -155,6 +155,22 @@ public RootObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { return builder; } + @Override + RootObjectMapper withoutMappers() { + return new RootObjectMapper( + simpleName(), + enabled, + subobjects, + dynamic, + Map.of(), + Map.of(), + dynamicDateTimeFormatters, + dynamicTemplates, + dateDetection, + numericDetection + ); + } + /** * Public API */ @@ -192,16 +208,22 @@ RuntimeField getRuntimeField(String name) { } @Override - protected MapperBuilderContext createChildContext(MapperBuilderContext mapperBuilderContext, String name) { - assert Objects.equals(mapperBuilderContext.buildFullName("foo"), "foo"); - return mapperBuilderContext; + protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeContext, String name) { + assert Objects.equals(mapperMergeContext.getMapperBuilderContext().buildFullName("foo"), "foo"); + return mapperMergeContext; } @Override - public RootObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperBuilderContext parentBuilderContext) { - final var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + public RootObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + if (mergeWith instanceof RootObjectMapper == false) { + MapperErrors.throwObjectMappingConflictError(mergeWith.name()); + } + return merge((RootObjectMapper) mergeWith, reason, parentMergeContext); + } + + RootObjectMapper merge(RootObjectMapper mergeWithObject, MergeReason reason, MapperMergeContext parentMergeContext) { + 
final var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); final Explicit numericDetection; - RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; if (mergeWithObject.numericDetection.explicit()) { numericDetection = mergeWithObject.numericDetection; } else { @@ -242,12 +264,15 @@ public RootObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperBuilde dynamicTemplates = this.dynamicTemplates; } final Map runtimeFields = new HashMap<>(this.runtimeFields); - assert this.runtimeFields != mergeWithObject.runtimeFields; for (Map.Entry runtimeField : mergeWithObject.runtimeFields.entrySet()) { if (runtimeField.getValue() == null) { runtimeFields.remove(runtimeField.getKey()); - } else { + } else if (runtimeFields.containsKey(runtimeField.getKey())) { runtimeFields.put(runtimeField.getKey(), runtimeField.getValue()); + } else { + if (parentMergeContext.decrementFieldBudgetIfPossible(1)) { + runtimeFields.put(runtimeField.getValue().name(), runtimeField.getValue()); + } } } @@ -502,4 +527,13 @@ private static boolean processField( } return false; } + + @Override + public int mapperSize() { + int size = runtimeFields().size(); + for (Mapper mapper : this) { + size += mapper.mapperSize(); + } + return size; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 99efa5c6b896b..1885869073711 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -994,10 +994,11 @@ protected String delegatingTo() { * using whatever */ private BlockSourceReader.LeafIteratorLookup blockReaderDisiLookup(BlockLoaderContext blContext) { - if (getTextSearchInfo().hasNorms()) { - return BlockSourceReader.lookupFromNorms(name()); - } - if (isIndexed() == false && isStored() == false) { + if (isIndexed()) { + if 
(getTextSearchInfo().hasNorms()) { + return BlockSourceReader.lookupFromNorms(name()); + } + } else if (isStored() == false) { return BlockSourceReader.lookupMatchingAll(); } return BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java index 9245e78602eb7..62bd8ec994639 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java @@ -113,7 +113,7 @@ public static void createField(DocumentParserContext context, IndexRouting.Extra } long timestamp = timestampFields.get(0).numericValue().longValue(); byte[] suffix = new byte[16]; - String id = createId(context.getDynamicMappers().isEmpty(), routingBuilder, tsid, timestamp, suffix); + String id = createId(context.hasDynamicMappers() == false, routingBuilder, tsid, timestamp, suffix); /* * Make sure that _id from extracting the tsid matches that _id * from extracting the _source. 
This should be true for all valid diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 27424d4591ba6..f165361ded105 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -29,6 +29,15 @@ import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.queries.function.FunctionQuery; +import org.apache.lucene.queries.function.valuesource.ByteKnnVectorFieldSource; +import org.apache.lucene.queries.function.valuesource.ByteVectorSimilarityFunction; +import org.apache.lucene.queries.function.valuesource.ConstKnnByteVectorValueSource; +import org.apache.lucene.queries.function.valuesource.ConstKnnFloatValueSource; +import org.apache.lucene.queries.function.valuesource.FloatKnnVectorFieldSource; +import org.apache.lucene.queries.function.valuesource.FloatVectorSimilarityFunction; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.KnnByteVectorQuery; import org.apache.lucene.search.Query; @@ -1063,6 +1072,67 @@ public Query createKnnQuery( return knnQuery; } + public Query createExactKnnQuery(float[] queryVector) { + if (isIndexed() == false) { + throw new IllegalArgumentException( + "to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]" + ); + } + if (queryVector.length != dims) { + throw new IllegalArgumentException( + "the query vector has a different dimension [" + queryVector.length + "] than the index vectors [" + dims + "]" + ); + } + elementType.checkVectorBounds(queryVector); + if (similarity == 
VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { + float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); + elementType.checkVectorMagnitude(similarity, ElementType.errorFloatElementsAppender(queryVector), squaredMagnitude); + if (similarity == VectorSimilarity.COSINE + && ElementType.FLOAT.equals(elementType) + && indexVersionCreated.onOrAfter(NORMALIZE_COSINE) + && isNotUnitVector(squaredMagnitude)) { + float length = (float) Math.sqrt(squaredMagnitude); + queryVector = Arrays.copyOf(queryVector, queryVector.length); + for (int i = 0; i < queryVector.length; i++) { + queryVector[i] /= length; + } + } + } + VectorSimilarityFunction vectorSimilarityFunction = similarity.vectorSimilarityFunction(indexVersionCreated, elementType); + return switch (elementType) { + case BYTE -> { + byte[] bytes = new byte[queryVector.length]; + for (int i = 0; i < queryVector.length; i++) { + bytes[i] = (byte) queryVector[i]; + } + yield new BooleanQuery.Builder().add(new FieldExistsQuery(name()), BooleanClause.Occur.FILTER) + .add( + new FunctionQuery( + new ByteVectorSimilarityFunction( + vectorSimilarityFunction, + new ByteKnnVectorFieldSource(name()), + new ConstKnnByteVectorValueSource(bytes) + ) + ), + BooleanClause.Occur.SHOULD + ) + .build(); + } + case FLOAT -> new BooleanQuery.Builder().add(new FieldExistsQuery(name()), BooleanClause.Occur.FILTER) + .add( + new FunctionQuery( + new FloatVectorSimilarityFunction( + vectorSimilarityFunction, + new FloatKnnVectorFieldSource(name()), + new ConstKnnFloatValueSource(queryVector) + ) + ), + BooleanClause.Occur.SHOULD + ) + .build(); + }; + } + public Query createKnnQuery( float[] queryVector, int numCands, @@ -1082,7 +1152,6 @@ public Query createKnnQuery( ); } elementType.checkVectorBounds(queryVector); - if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); 
elementType.checkVectorMagnitude(similarity, ElementType.errorFloatElementsAppender(queryVector), squaredMagnitude); @@ -1110,6 +1179,7 @@ && isNotUnitVector(squaredMagnitude)) { case FLOAT -> parentFilter != null ? new ProfilingDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) : new ProfilingKnnFloatVectorQuery(name(), queryVector, numCands, filter); + }; if (similarityThreshold != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index f86142ffbe862..d3d7b46d3d729 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -296,6 +296,10 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws if (queryRewriteContext == null) { return this; } + final InnerHitsRewriteContext ihrc = queryRewriteContext.convertToInnerHitsRewriteContext(); + if (ihrc != null) { + return doInnerHitsRewrite(ihrc); + } final CoordinatorRewriteContext crc = queryRewriteContext.convertToCoordinatorRewriteContext(); if (crc != null) { return doCoordinatorRewrite(crc); @@ -342,6 +346,16 @@ protected QueryBuilder doIndexMetadataRewrite(final QueryRewriteContext context) return this; } + /** + * Optional rewrite logic that allows for optimization for extracting inner hits + * @param context an {@link InnerHitsRewriteContext} instance + * @return A {@link QueryBuilder} representing the rewritten query optimized for inner hit extraction + * @throws IOException if an error occurs while rewriting the query + */ + protected QueryBuilder doInnerHitsRewrite(final InnerHitsRewriteContext context) throws IOException { + return this; + } + /** * For internal usage only! 
* diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitsRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitsRewriteContext.java new file mode 100644 index 0000000000000..0b437fa451e1b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitsRewriteContext.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.index.query; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xcontent.XContentParserConfiguration; + +import java.util.function.LongSupplier; + +/** + * Context object used to rewrite {@link QueryBuilder} instances into an optimized version for extracting inner_hits. 
+ */ +public final class InnerHitsRewriteContext extends QueryRewriteContext { + public InnerHitsRewriteContext(final XContentParserConfiguration parserConfiguration, final LongSupplier nowInMillis) { + super(parserConfiguration, null, nowInMillis); + } + + @Override + public InnerHitsRewriteContext convertToInnerHitsRewriteContext() { + return this; + } + + @Override + @SuppressWarnings({ "rawtypes" }) + public void executeAsyncActions(ActionListener listener) { + // InnerHitsRewriteContext does not support async actions at all, and doesn't supply a valid `client` object + throw new UnsupportedOperationException("InnerHitsRewriteContext does not support async actions"); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java index 04ae0bb498841..47e4cf7273703 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java @@ -39,14 +39,14 @@ public MatchNoneQueryBuilder(String rewriteReason) { */ public MatchNoneQueryBuilder(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { rewriteReason = in.readOptionalString(); } } @Override protected void doWriteTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeOptionalString(rewriteReason); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 30a85f4941105..e36c4d608d59f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -159,6 +159,10 @@ public DataRewriteContext convertToDataRewriteContext() { return null; } + public InnerHitsRewriteContext convertToInnerHitsRewriteContext() { + return null; + } + /** * Returns the {@link MappedFieldType} for the provided field name. * If the field is not mapped, the behaviour depends on the index.query.parse.allow_unmapped_fields setting, which defaults to true. @@ -285,6 +289,13 @@ public IndexSettings getIndexSettings() { return indexSettings; } + /** + * Returns the MappingLookup for the queried index. + */ + public MappingLookup getMappingLookup() { + return mappingLookup; + } + /** * Given an index pattern, checks whether it matches against the current shard. The pattern * may represent a fully qualified index name if the search targets remote shards. diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 5a2b01838e27b..63cd598caa784 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -91,7 +91,7 @@ public final class SimpleQueryStringBuilder extends AbstractQueryBuilder searchFailures; private boolean timedOut; - private static final String TOOK_FIELD = "took"; - private static final String TIMED_OUT_FIELD = "timed_out"; - private static final String FAILURES_FIELD = "failures"; - - @SuppressWarnings("unchecked") - private static final ObjectParser PARSER = new ObjectParser<>( - "bulk_by_scroll_response", - true, - BulkByScrollResponseBuilder::new - ); - static { - PARSER.declareLong(BulkByScrollResponseBuilder::setTook, new ParseField(TOOK_FIELD)); - PARSER.declareBoolean(BulkByScrollResponseBuilder::setTimedOut, new ParseField(TIMED_OUT_FIELD)); - 
PARSER.declareObjectArray(BulkByScrollResponseBuilder::setFailures, (p, c) -> parseFailure(p), new ParseField(FAILURES_FIELD)); - // since the result of BulkByScrollResponse.Status are mixed we also parse that in this - Status.declareFields(PARSER); - } + static final String TOOK_FIELD = "took"; + static final String TIMED_OUT_FIELD = "timed_out"; + static final String FAILURES_FIELD = "failures"; public BulkByScrollResponse(StreamInput in) throws IOException { super(in); @@ -195,7 +171,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(TOOK_FIELD, took.millis()); builder.field(TIMED_OUT_FIELD, timedOut); status.innerXContent(builder, params); - builder.startArray("failures"); + builder.startArray(FAILURES_FIELD); for (Failure failure : bulkFailures) { builder.startObject(); failure.toXContent(builder, params); @@ -208,59 +184,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static BulkByScrollResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).buildResponse(); - } - - private static Object parseFailure(XContentParser parser) throws IOException { - ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser); - Token token; - String index = null; - String id = null; - Integer status = null; - Integer shardId = null; - String nodeId = null; - ElasticsearchException bulkExc = null; - ElasticsearchException searchExc = null; - while ((token = parser.nextToken()) != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, token, parser); - String name = parser.currentName(); - token = parser.nextToken(); - if (token == Token.START_ARRAY) { - parser.skipChildren(); - } else if (token == Token.START_OBJECT) { - switch (name) { - case SearchFailure.REASON_FIELD -> searchExc = ElasticsearchException.fromXContent(parser); - case Failure.CAUSE_FIELD -> bulkExc = ElasticsearchException.fromXContent(parser); - default -> 
parser.skipChildren(); - } - } else if (token == Token.VALUE_STRING) { - switch (name) { - // This field is the same as SearchFailure.index - case Failure.INDEX_FIELD -> index = parser.text(); - case Failure.ID_FIELD -> id = parser.text(); - case SearchFailure.NODE_FIELD -> nodeId = parser.text(); - } - } else if (token == Token.VALUE_NUMBER) { - switch (name) { - case Failure.STATUS_FIELD -> status = parser.intValue(); - case SearchFailure.SHARD_FIELD -> shardId = parser.intValue(); - } - } - } - if (bulkExc != null) { - return new Failure(index, id, bulkExc, RestStatus.fromCode(status)); - } else if (searchExc != null) { - if (status == null) { - return new SearchFailure(searchExc, index, shardId, nodeId); - } else { - return new SearchFailure(searchExc, index, shardId, nodeId, RestStatus.fromCode(status)); - } - } else { - throw new ElasticsearchParseException("failed to parse failures array. At least one of {reason,cause} must be present"); - } - } - @Override public String toString() { StringBuilder builder = new StringBuilder(); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 5db1732fc1590..42cf8a185ec7a 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -157,8 +157,8 @@ private static class ClientHit implements Hit { private final BytesReference source; ClientHit(SearchHit delegate) { - this.delegate = delegate; - source = delegate.hasSource() ? delegate.getSourceRef() : null; + this.delegate = delegate.asUnpooled(); // TODO: use pooled version here + source = this.delegate.hasSource() ? 
this.delegate.getSourceRef() : null; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java index b34c4dd3bfb43..c547c9f5902b1 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java @@ -16,6 +16,6 @@ public class DeleteByQueryAction extends ActionType { public static final String NAME = "indices:data/write/delete/byquery"; private DeleteByQueryAction() { - super(NAME, BulkByScrollResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java index 6ef9214663e5c..33294657a9f3d 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java @@ -15,6 +15,6 @@ public class ReindexAction extends ActionType { public static final String NAME = "indices:data/write/reindex"; private ReindexAction() { - super(NAME, BulkByScrollResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java index 6ec2e9f9b3232..b1642f369db37 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java @@ -15,6 +15,6 @@ public class UpdateByQueryAction extends ActionType { public static final String NAME = "indices:data/write/update/byquery"; private UpdateByQueryAction() { - super(NAME, BulkByScrollResponse::new); + super(NAME); } } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java 
b/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java index 1f8a863fe3e7b..7d3df2c174a83 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java @@ -39,7 +39,7 @@ public class GlobalCheckpointSyncAction extends TransportReplicationAction< ReplicationResponse> { public static String ACTION_NAME = "indices:admin/seq_no/global_checkpoint_sync"; - public static ActionType TYPE = new ActionType<>(ACTION_NAME, ReplicationResponse::new); + public static ActionType TYPE = new ActionType<>(ACTION_NAME); @Inject public GlobalCheckpointSyncAction( diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java index 162d7311a0594..49548aa1a6353 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -45,9 +46,13 @@ public class RetentionLeaseActions { public static final long RETAIN_ALL = -1; - public static final ActionType ADD = ActionType.emptyResponse("indices:admin/seq_no/add_retention_lease"); - public static final ActionType RENEW = ActionType.emptyResponse("indices:admin/seq_no/renew_retention_lease"); - public static final ActionType REMOVE = ActionType.emptyResponse("indices:admin/seq_no/remove_retention_lease"); + public static final ActionType ADD 
= new ActionType<>("indices:admin/seq_no/add_retention_lease"); + public static final ActionType RENEW = new ActionType<>("indices:admin/seq_no/renew_retention_lease"); + public static final ActionType REMOVE = new ActionType<>("indices:admin/seq_no/remove_retention_lease"); + + public static final RemoteClusterActionType REMOTE_ADD = RemoteClusterActionType.emptyResponse(ADD.name()); + public static final RemoteClusterActionType REMOTE_RENEW = RemoteClusterActionType.emptyResponse(RENEW.name()); + public static final RemoteClusterActionType REMOTE_REMOVE = RemoteClusterActionType.emptyResponse(REMOVE.name()); abstract static class TransportRetentionLeaseAction> extends TransportSingleShardAction { diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java index 5046ea1cb4d0d..541e279d4cfbb 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java @@ -117,7 +117,7 @@ public ReplicationResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java index 469dcd09e8f63..d03a29922da07 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java @@ -127,7 +127,7 @@ public ReplicationResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return 
TransportResponseHandler.TRANSPORT_WORKER; } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index aa6e3e1d45003..65291a99c25a3 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1740,59 +1740,87 @@ public void prepareForIndexRecovery() { } /** - * A best effort to bring up this shard to the global checkpoint using the local translog before performing a peer recovery. + * A best-effort attempt to bring up this shard to the global checkpoint using the local translog before performing a peer recovery. * - * @return a sequence number that an operation-based peer recovery can start with. - * This is the first operation after the local checkpoint of the safe commit if exists. + * @param recoveryStartingSeqNoListener a listener to be completed with the sequence number from which an operation-based peer recovery + * can start. This is the first operation after the local checkpoint of the safe commit if exists. 
*/ - public long recoverLocallyUpToGlobalCheckpoint() { - assert Thread.holdsLock(mutex) == false : "recover locally under mutex"; + public void recoverLocallyUpToGlobalCheckpoint(ActionListener recoveryStartingSeqNoListener) { + assert Thread.holdsLock(mutex) == false : "must not hold the mutex here"; if (state != IndexShardState.RECOVERING) { - throw new IndexShardNotRecoveringException(shardId, state); + recoveryStartingSeqNoListener.onFailure(new IndexShardNotRecoveringException(shardId, state)); + return; + } + try { + recoveryState.validateCurrentStage(RecoveryState.Stage.INDEX); + } catch (Exception e) { + recoveryStartingSeqNoListener.onFailure(e); + return; } - recoveryState.validateCurrentStage(RecoveryState.Stage.INDEX); assert routingEntry().recoverySource().getType() == RecoverySource.Type.PEER : "not a peer recovery [" + routingEntry() + "]"; - final Optional safeCommit; - final long globalCheckpoint; try { - final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); - globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID); - safeCommit = store.findSafeIndexCommit(globalCheckpoint); + final var translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); + final var globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID); + final var safeCommit = store.findSafeIndexCommit(globalCheckpoint); + ActionListener.run(recoveryStartingSeqNoListener.delegateResponse((l, e) -> { + logger.debug(() -> format("failed to recover shard locally up to global checkpoint %s", globalCheckpoint), e); + l.onResponse(UNASSIGNED_SEQ_NO); + }), l -> doLocalRecovery(globalCheckpoint, safeCommit, l)); } catch (org.apache.lucene.index.IndexNotFoundException e) { logger.trace("skip local recovery as no index commit found"); - return UNASSIGNED_SEQ_NO; + 
recoveryStartingSeqNoListener.onResponse(UNASSIGNED_SEQ_NO); } catch (Exception e) { logger.debug("skip local recovery as failed to find the safe commit", e); - return UNASSIGNED_SEQ_NO; + recoveryStartingSeqNoListener.onResponse(UNASSIGNED_SEQ_NO); } - try { - maybeCheckIndex(); // check index here and won't do it again if ops-based recovery occurs - recoveryState.setLocalTranslogStage(); - if (safeCommit.isPresent() == false) { - logger.trace("skip local recovery as no safe commit found"); - return UNASSIGNED_SEQ_NO; - } - assert safeCommit.get().localCheckpoint <= globalCheckpoint : safeCommit.get().localCheckpoint + " > " + globalCheckpoint; - if (safeCommit.get().localCheckpoint == globalCheckpoint) { - logger.trace( - "skip local recovery as the safe commit is up to date; safe commit {} global checkpoint {}", - safeCommit.get(), - globalCheckpoint - ); - recoveryState.getTranslog().totalLocal(0); - return globalCheckpoint + 1; - } - if (indexSettings.getIndexMetadata().getState() == IndexMetadata.State.CLOSE - || IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.get(indexSettings.getSettings())) { - logger.trace( - "skip local recovery as the index was closed or not allowed to write; safe commit {} global checkpoint {}", - safeCommit.get(), - globalCheckpoint - ); - recoveryState.getTranslog().totalLocal(0); - return safeCommit.get().localCheckpoint + 1; - } - try { + } + + private void doLocalRecovery( + long globalCheckpoint, + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") Optional safeCommit, + ActionListener recoveryStartingSeqNoListener + ) { + maybeCheckIndex(); // check index here and won't do it again if ops-based recovery occurs + recoveryState.setLocalTranslogStage(); + if (safeCommit.isPresent() == false) { + logger.trace("skip local recovery as no safe commit found"); + recoveryStartingSeqNoListener.onResponse(UNASSIGNED_SEQ_NO); + return; + } + + assert safeCommit.get().localCheckpoint <= globalCheckpoint : safeCommit.get().localCheckpoint + " > 
" + globalCheckpoint; + if (safeCommit.get().localCheckpoint == globalCheckpoint) { + logger.trace( + "skip local recovery as the safe commit is up to date; safe commit {} global checkpoint {}", + safeCommit.get(), + globalCheckpoint + ); + recoveryState.getTranslog().totalLocal(0); + recoveryStartingSeqNoListener.onResponse(globalCheckpoint + 1); + return; + } + + if (indexSettings.getIndexMetadata().getState() == IndexMetadata.State.CLOSE + || IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.get(indexSettings.getSettings())) { + logger.trace( + "skip local recovery as the index was closed or not allowed to write; safe commit {} global checkpoint {}", + safeCommit.get(), + globalCheckpoint + ); + recoveryState.getTranslog().totalLocal(0); + recoveryStartingSeqNoListener.onResponse(safeCommit.get().localCheckpoint + 1); + return; + } + + SubscribableListener + // First, start a temporary engine, recover the local translog up to the given checkpoint, and then close the engine again. + .newForked(l -> ActionListener.runWithResource(ActionListener.assertOnce(l), () -> () -> { + assert Thread.holdsLock(mutex) == false : "must not hold the mutex here"; + synchronized (engineMutex) { + IOUtils.close(currentEngineReference.getAndSet(null)); + } + }, (recoveryCompleteListener, ignoredRef) -> { + assert Thread.holdsLock(mutex) == false : "must not hold the mutex here"; final Engine.TranslogRecoveryRunner translogRecoveryRunner = (engine, snapshot) -> { recoveryState.getTranslog().totalLocal(snapshot.totalOperations()); final int recoveredOps = runTranslogRecovery( @@ -1805,29 +1833,34 @@ public long recoverLocallyUpToGlobalCheckpoint() { return recoveredOps; }; innerOpenEngineAndTranslog(() -> globalCheckpoint); - getEngine().recoverFromTranslog(translogRecoveryRunner, globalCheckpoint); - logger.trace("shard locally recovered up to {}", getEngine().getSeqNoStats(globalCheckpoint)); - } finally { - synchronized (engineMutex) { - 
IOUtils.close(currentEngineReference.getAndSet(null)); + getEngine().recoverFromTranslog(translogRecoveryRunner, globalCheckpoint, recoveryCompleteListener.map(v -> { + logger.trace("shard locally recovered up to {}", getEngine().getSeqNoStats(globalCheckpoint)); + return v; + })); + })) + // If the recovery replayed any operations then it will have created a new safe commit for the specified global checkpoint, + // which we can use for the rest of the recovery, so now we load the safe commit and use its local checkpoint as the recovery + // starting point. + .andThenApply(ignored -> { + assert Thread.holdsLock(mutex) == false : "must not hold the mutex here"; + try { + // we need to find the safe commit again as we should have created a new one during the local recovery + final Optional newSafeCommit = store.findSafeIndexCommit(globalCheckpoint); + assert newSafeCommit.isPresent() : "no safe commit found after local recovery"; + return newSafeCommit.get().localCheckpoint + 1; + } catch (Exception e) { + logger.debug( + () -> format( + "failed to find the safe commit after recovering shard locally up to global checkpoint %s", + globalCheckpoint + ), + e + ); + return UNASSIGNED_SEQ_NO; } - } - } catch (Exception e) { - logger.debug(() -> format("failed to recover shard locally up to global checkpoint %s", globalCheckpoint), e); - return UNASSIGNED_SEQ_NO; - } - try { - // we need to find the safe commit again as we should have created a new one during the local recovery - final Optional newSafeCommit = store.findSafeIndexCommit(globalCheckpoint); - assert newSafeCommit.isPresent() : "no safe commit found after local recovery"; - return newSafeCommit.get().localCheckpoint + 1; - } catch (Exception e) { - logger.debug( - () -> format("failed to find the safe commit after recovering shard locally up to global checkpoint %s", globalCheckpoint), - e - ); - return UNASSIGNED_SEQ_NO; - } + }) + + .addListener(recoveryStartingSeqNoListener); } public void 
trimOperationOfPreviousPrimaryTerms(long aboveSeqNo) { diff --git a/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java b/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java index c0271ad30d720..9ce2bc201c20f 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.Engine; @@ -35,7 +36,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; @@ -108,7 +108,7 @@ public class IndexingMemoryController implements IndexingOperationListener, Clos private final ShardsIndicesStatusChecker statusChecker; - private final Set pendingWriteIndexingBufferSet = Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final Set pendingWriteIndexingBufferSet = ConcurrentCollections.newConcurrentSet(); private final Deque pendingWriteIndexingBufferQueue = new ConcurrentLinkedDeque<>(); IndexingMemoryController(Settings settings, ThreadPool threadPool, Iterable indexServices) { diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 39a302963d3d1..02288f84928e3 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ 
b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -33,7 +33,6 @@ import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; @@ -651,7 +650,7 @@ private void createShardWhenLockAvailable( listener.onResponse(true); } catch (ShardLockObtainFailedException e) { if (e.getCause() instanceof InterruptedException || Thread.currentThread().isInterrupted()) { - logger.warn(Strings.format("interrupted while creating shard [{}]", shardRouting), e); + logger.warn(format("interrupted while creating shard [%s]", shardRouting), e); listener.onFailure(e); return; } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 61545ada107b6..3447cc73a4288 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -19,6 +19,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ChannelActionListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; @@ -375,26 +376,35 @@ record StartRecoveryRequestToSend(StartRecoveryRequest startRecoveryRequest, Str }); if (preExistingRequest == null) { - ActionListener.run(toSendListener.map(v -> { - logger.trace("{} preparing shard for peer recovery", recoveryTarget.shardId()); - 
indexShard.prepareForIndexRecovery(); - if (indexShard.indexSettings().getIndexMetadata().isSearchableSnapshot()) { - // for searchable snapshots, peer recovery is treated similarly to recovery from snapshot - indexShard.getIndexEventListener().afterFilesRestoredFromRepository(indexShard); - final Store store = indexShard.store(); - store.incRef(); - try { - StoreRecovery.bootstrap(indexShard, store); - } finally { - store.decRef(); + SubscribableListener + // run pre-recovery activities + .newForked(indexShard::preRecovery) + // recover the shard as far as possible based on data held locally + .andThen((l, v) -> { + logger.trace("{} preparing shard for peer recovery", recoveryTarget.shardId()); + indexShard.prepareForIndexRecovery(); + if (indexShard.indexSettings().getIndexMetadata().isSearchableSnapshot()) { + // for searchable snapshots, peer recovery is treated similarly to recovery from snapshot + indexShard.getIndexEventListener().afterFilesRestoredFromRepository(indexShard); + final Store store = indexShard.store(); + store.incRef(); + try { + StoreRecovery.bootstrap(indexShard, store); + } finally { + store.decRef(); + } } - } - final long startingSeqNo = indexShard.recoverLocallyUpToGlobalCheckpoint(); - assert startingSeqNo == UNASSIGNED_SEQ_NO || recoveryTarget.state().getStage() == RecoveryState.Stage.TRANSLOG - : "unexpected recovery stage [" + recoveryTarget.state().getStage() + "] starting seqno [ " + startingSeqNo + "]"; - final var startRequest = getStartRecoveryRequest(logger, clusterService.localNode(), recoveryTarget, startingSeqNo); - return new StartRecoveryRequestToSend(startRequest, PeerRecoverySourceService.Actions.START_RECOVERY, startRequest); - }), indexShard::preRecovery); + indexShard.recoverLocallyUpToGlobalCheckpoint(ActionListener.assertOnce(l)); + }) + // now construct the start-recovery request + .andThenApply(startingSeqNo -> { + assert startingSeqNo == UNASSIGNED_SEQ_NO || recoveryTarget.state().getStage() == 
RecoveryState.Stage.TRANSLOG + : "unexpected recovery stage [" + recoveryTarget.state().getStage() + "] starting seqno [ " + startingSeqNo + "]"; + final var startRequest = getStartRecoveryRequest(logger, clusterService.localNode(), recoveryTarget, startingSeqNo); + return new StartRecoveryRequestToSend(startRequest, PeerRecoverySourceService.Actions.START_RECOVERY, startRequest); + }) + // finally send the start-recovery request + .addListener(toSendListener); } else { toSendListener.onResponse( new StartRecoveryRequestToSend( @@ -849,7 +859,7 @@ public void handleException(TransportException e) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { // we do some heavy work like refreshes in the response so fork off to the generic threadpool return threadPool.generic(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java index acc33f5d85ea4..b1590a282fc8d 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java @@ -580,7 +580,7 @@ public synchronized void totalOperationsOnStart(int total) { /** * Sets the total number of translog operations to be recovered locally before performing peer recovery - * @see IndexShard#recoverLocallyUpToGlobalCheckpoint() + * @see IndexShard#recoverLocallyUpToGlobalCheckpoint */ public synchronized void totalLocal(int totalLocal) { assert totalLocal >= recovered : totalLocal + " < " + recovered; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java index 4ace9ab1bc28d..9cf5851454d6c 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java +++ 
b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java @@ -43,7 +43,7 @@ public StartRecoveryRequest(StreamInput in) throws IOException { targetAllocationId = in.readString(); sourceNode = new DiscoveryNode(in); targetNode = new DiscoveryNode(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { clusterStateVersion = in.readVLong(); } else { clusterStateVersion = 0L; // bwc: do not wait for cluster state to be applied @@ -164,7 +164,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(targetAllocationId); sourceNode.writeTo(out); targetNode.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { out.writeVLong(clusterStateVersion); } // else bwc: just omit it, the receiver doesn't wait for a cluster state anyway metadataSnapshot.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java b/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java index 490f19fc9111c..583a38a9da41c 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java @@ -23,7 +23,7 @@ public class StatelessPrimaryRelocationAction { - public static final ActionType TYPE = ActionType.emptyResponse( + public static final ActionType TYPE = new ActionType<>( "internal:index/shard/recovery/stateless_primary_relocation" ); @@ -49,7 +49,7 @@ public Request(StreamInput in) throws IOException { shardId = new ShardId(in); targetNode = new DiscoveryNode(in); targetAllocationId = in.readString(); - if 
(in.getTransportVersion().onOrAfter(TransportVersions.WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { clusterStateVersion = in.readVLong(); } else { clusterStateVersion = 0L; // temporary bwc: do not wait for cluster state to be applied @@ -68,7 +68,7 @@ public void writeTo(StreamOutput out) throws IOException { shardId.writeTo(out); targetNode.writeTo(out); out.writeString(targetAllocationId); - if (out.getTransportVersion().onOrAfter(TransportVersions.WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { out.writeVLong(clusterStateVersion); } // temporary bwc: just omit it, the receiver doesn't wait for a cluster state anyway } diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index d631c7a11d10c..e97d76638455a 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -257,7 +257,7 @@ public ShardActiveResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } diff --git a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java index ae9f7b730d60b..63ea457cd5a71 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java @@ -64,7 +64,7 @@ public class TransportNodesListShardStoreMetadata extends TransportNodesAction< private static final Logger logger = 
LogManager.getLogger(TransportNodesListShardStoreMetadata.class); public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, Writeable.Reader.localOnly()); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private final Settings settings; private final IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index b6ae21977e4bc..235de51d22572 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -87,6 +87,27 @@ default void init(Client client) {} */ void start(Model model, ActionListener listener); + /** + * Stop the model deployment. + * The default action does nothing except acknowledge the request (true). + * @param modelId The ID of the model to be stopped + * @param listener The listener + */ + default void stop(String modelId, ActionListener listener) { + listener.onResponse(true); + } + + /** + * Put the model definition (if applicable) + * The main purpose of this function is to download ELSER + * The default action does nothing except acknowledge the request (true). + * @param modelVariant The configuration of the model variant to be downloaded + * @param listener The listener + */ + default void putModel(Model modelVariant, ActionListener listener) { + listener.onResponse(true); + } + /** * Optionally test the new model configuration in the inference service. 
* This function should be called when the model is first created, the diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java index a0ed7bbd82b24..d5973807d9d78 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java @@ -9,9 +9,9 @@ package org.elasticsearch.inference; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -20,7 +20,7 @@ import java.util.function.Function; import java.util.stream.Collectors; -public class InferenceServiceRegistry extends AbstractLifecycleComponent { +public class InferenceServiceRegistry implements Closeable { private final Map services; private final List namedWriteables = new ArrayList<>(); @@ -53,17 +53,9 @@ public List getNamedWriteables() { } @Override - protected void doStart() { - - } - - @Override - protected void doStop() { - - } - - @Override - protected void doClose() throws IOException { - + public void close() throws IOException { + for (var service : services.values()) { + service.close(); + } } } diff --git a/server/src/main/java/org/elasticsearch/inference/InputType.java b/server/src/main/java/org/elasticsearch/inference/InputType.java index f8bbea4ae121f..ffc67995c1dda 100644 --- a/server/src/main/java/org/elasticsearch/inference/InputType.java +++ b/server/src/main/java/org/elasticsearch/inference/InputType.java @@ -8,17 +8,12 @@ package org.elasticsearch.inference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; - -import 
java.io.IOException; import java.util.Locale; /** * Defines the type of request, whether the request is to ingest a document or search for a document. */ -public enum InputType implements Writeable { +public enum InputType { INGEST, SEARCH; @@ -29,12 +24,7 @@ public String toString() { return name().toLowerCase(Locale.ROOT); } - public static InputType fromStream(StreamInput in) throws IOException { - return in.readEnum(InputType.class); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeEnum(this); + public static InputType fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); } } diff --git a/server/src/main/java/org/elasticsearch/inference/Model.java b/server/src/main/java/org/elasticsearch/inference/Model.java index 02be39d8a653d..81e1cf8de7827 100644 --- a/server/src/main/java/org/elasticsearch/inference/Model.java +++ b/server/src/main/java/org/elasticsearch/inference/Model.java @@ -23,12 +23,26 @@ public Model(ModelConfigurations configurations, ModelSecrets secrets) { this.secrets = Objects.requireNonNull(secrets); } + public Model(Model model, TaskSettings taskSettings) { + Objects.requireNonNull(model); + + configurations = ModelConfigurations.of(model, taskSettings); + secrets = model.getSecrets(); + } + + public Model(Model model, ServiceSettings serviceSettings) { + Objects.requireNonNull(model); + + configurations = ModelConfigurations.of(model, serviceSettings); + secrets = model.getSecrets(); + } + public Model(ModelConfigurations configurations) { this(configurations, new ModelSecrets()); } - public String getModelId() { - return configurations.getModelId(); + public String getInferenceEntityId() { + return configurations.getInferenceEntityId(); } public TaskType getTaskType() { diff --git a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java index cdccca7eb0c0e..a0e7ccff51796 100644 
--- a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java +++ b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java @@ -27,7 +27,33 @@ public class ModelConfigurations implements ToXContentObject, VersionedNamedWrit public static final String TASK_SETTINGS = "task_settings"; private static final String NAME = "inference_model"; - private final String modelId; + public static ModelConfigurations of(Model model, TaskSettings taskSettings) { + Objects.requireNonNull(model); + Objects.requireNonNull(taskSettings); + + return new ModelConfigurations( + model.getConfigurations().getInferenceEntityId(), + model.getConfigurations().getTaskType(), + model.getConfigurations().getService(), + model.getServiceSettings(), + taskSettings + ); + } + + public static ModelConfigurations of(Model model, ServiceSettings serviceSettings) { + Objects.requireNonNull(model); + Objects.requireNonNull(serviceSettings); + + return new ModelConfigurations( + model.getConfigurations().getInferenceEntityId(), + model.getConfigurations().getTaskType(), + model.getConfigurations().getService(), + serviceSettings, + model.getTaskSettings() + ); + } + + private final String inferenceEntityId; private final TaskType taskType; private final String service; private final ServiceSettings serviceSettings; @@ -36,18 +62,18 @@ public class ModelConfigurations implements ToXContentObject, VersionedNamedWrit /** * Allows no task settings to be defined. This will default to the {@link EmptyTaskSettings} object. 
*/ - public ModelConfigurations(String modelId, TaskType taskType, String service, ServiceSettings serviceSettings) { - this(modelId, taskType, service, serviceSettings, EmptyTaskSettings.INSTANCE); + public ModelConfigurations(String inferenceEntityId, TaskType taskType, String service, ServiceSettings serviceSettings) { + this(inferenceEntityId, taskType, service, serviceSettings, EmptyTaskSettings.INSTANCE); } public ModelConfigurations( - String modelId, + String inferenceEntityId, TaskType taskType, String service, ServiceSettings serviceSettings, TaskSettings taskSettings ) { - this.modelId = Objects.requireNonNull(modelId); + this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); this.taskType = Objects.requireNonNull(taskType); this.service = Objects.requireNonNull(service); this.serviceSettings = Objects.requireNonNull(serviceSettings); @@ -55,7 +81,7 @@ public ModelConfigurations( } public ModelConfigurations(StreamInput in) throws IOException { - this.modelId = in.readString(); + this.inferenceEntityId = in.readString(); this.taskType = in.readEnum(TaskType.class); this.service = in.readString(); this.serviceSettings = in.readNamedWriteable(ServiceSettings.class); @@ -64,15 +90,15 @@ public ModelConfigurations(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(modelId); + out.writeString(inferenceEntityId); out.writeEnum(taskType); out.writeString(service); out.writeNamedWriteable(serviceSettings); out.writeNamedWriteable(taskSettings); } - public String getModelId() { - return modelId; + public String getInferenceEntityId() { + return inferenceEntityId; } public TaskType getTaskType() { @@ -94,7 +120,7 @@ public TaskSettings getTaskSettings() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(MODEL_ID, modelId); + builder.field(MODEL_ID, inferenceEntityId); 
builder.field(TaskType.NAME, taskType.toString()); builder.field(SERVICE, service); builder.field(SERVICE_SETTINGS, serviceSettings); @@ -110,7 +136,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_074; + return TransportVersions.V_8_11_X; } @Override @@ -118,7 +144,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ModelConfigurations model = (ModelConfigurations) o; - return Objects.equals(modelId, model.modelId) + return Objects.equals(inferenceEntityId, model.inferenceEntityId) && taskType == model.taskType && Objects.equals(service, model.service) && Objects.equals(serviceSettings, model.serviceSettings) @@ -127,6 +153,6 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(modelId, taskType, service, serviceSettings, taskSettings); + return Objects.hash(inferenceEntityId, taskType, service, serviceSettings, taskSettings); } } diff --git a/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java b/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java index 78199ae3029ba..e81c2f50efc5f 100644 --- a/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java +++ b/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java @@ -69,7 +69,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.INFERENCE_MODEL_SECRETS_ADDED; + return TransportVersions.V_8_11_X; } @Override diff --git a/server/src/main/java/org/elasticsearch/internal/BuildExtension.java b/server/src/main/java/org/elasticsearch/internal/BuildExtension.java index 0d1a9880f8d32..921577317604a 100644 --- a/server/src/main/java/org/elasticsearch/internal/BuildExtension.java +++ b/server/src/main/java/org/elasticsearch/internal/BuildExtension.java @@ -19,4 +19,11 @@ public interface 
BuildExtension { * Returns the {@link Build} that represents the running Elasticsearch code. */ Build getCurrentBuild(); + + /** + * {@code true} if this build uses release versions. + */ + default boolean hasReleaseVersioning() { + return true; + } } diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 476ad516aab80..8874c43c919ca 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; /** * NodeMetrics monitors various statistics of an Elasticsearch node and exposes them as metrics through @@ -36,17 +37,23 @@ public class NodeMetrics extends AbstractLifecycleComponent { private final NodeService nodeService; private final List metrics; private NodeStatsCache stats; + private final TimeValue cacheExpiry; /** * Constructs a new NodeMetrics instance. * - * @param meterRegistry The MeterRegistry used to register metrics. - * @param nodeService The NodeService for interacting with the Elasticsearch node and extracting statistics. - */ - public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService) { + * @param meterRegistry The MeterRegistry used to register metrics. + * @param nodeService The NodeService for interacting with the Elasticsearch node and extracting statistics. 
+ * @param metricsInterval The interval at which the agent sends metrics to the APM Server + * */ + public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService, TimeValue metricsInterval) { this.registry = meterRegistry; this.nodeService = nodeService; this.metrics = new ArrayList<>(17); + // we set the cache to expire after half the interval at which the agent sends + // metrics to the APM Server so that there is enough time for the cache not + // update during the same poll period and that expires before a new poll period + this.cacheExpiry = new TimeValue(metricsInterval.getMillis() / 2); } /** @@ -56,16 +63,19 @@ public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService) { * @param registry The MeterRegistry used to register and collect metrics. */ private void registerAsyncMetrics(MeterRegistry registry) { - // Agent should poll stats every 4 minutes and being this cache is lazy we need a - // number high enough so that the cache does not update during the same poll - // period and that expires before a new poll period, therefore we choose 1 minute. 
- this.stats = new NodeStatsCache(TimeValue.timeValueMinutes(1)); + this.stats = new NodeStatsCache(cacheExpiry); metrics.add( registry.registerLongAsyncCounter( "es.indices.get.total", "Total number of get operations", "operation", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getCount()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getGet()) + .map(o -> o.getCount()) + .orElse(0L) + ) ) ); @@ -74,7 +84,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indices.get.time", "Time in milliseconds spent performing get operations.", "milliseconds", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getTimeInMillis()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getGet()) + .map(o -> o.getTimeInMillis()) + .orElse(0L) + ) ) ); @@ -83,7 +99,14 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indices.search.fetch.total", "Total number of fetch operations.", "operation", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchCount()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getSearch()) + .map(o -> o.getTotal()) + .map(o -> o.getFetchCount()) + .orElse(0L) + ) ) ); @@ -92,7 +115,14 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indices.search.fetch.time", "Time in milliseconds spent performing fetch operations.", "milliseconds", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchTimeInMillis()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getSearch()) + .map(o -> o.getTotal()) + .map(o -> o.getFetchTimeInMillis()) + .orElse(0L) + ) ) ); @@ -101,7 +131,13 @@ private void 
registerAsyncMetrics(MeterRegistry registry) { "es.indices.merge.total", "Total number of merge operations.", "operation", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotal()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getMerge()) + .map(o -> o.getTotal()) + .orElse(0L) + ) ) ); @@ -110,7 +146,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indices.merge.time", "Time in milliseconds spent performing merge operations.", "milliseconds", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotalTimeInMillis()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getMerge()) + .map(o -> o.getTotalTimeInMillis()) + .orElse(0L) + ) ) ); @@ -119,7 +161,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.translog.operations.total", "Number of transaction log operations.", "operation", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().estimatedNumberOfOperations()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getTranslog()) + .map(o -> o.estimatedNumberOfOperations()) + .orElse(0) + ) ) ); @@ -128,7 +176,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.translog.size", "Size, in bytes, of the transaction log.", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getTranslogSizeInBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getTranslog()) + .map(o -> o.getTranslogSizeInBytes()) + .orElse(0L) + ) ) ); @@ -137,7 +191,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.translog.uncommitted_operations.total", "Number of uncommitted transaction log operations.", "operations", - () -> new 
LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedOperations()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getTranslog()) + .map(o -> o.getUncommittedOperations()) + .orElse(0) + ) ) ); @@ -146,7 +206,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.translog.uncommitted_operations.size", "Size, in bytes, of uncommitted transaction log operations.", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedSizeInBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getTranslog()) + .map(o -> o.getUncommittedSizeInBytes()) + .orElse(0L) + ) ) ); @@ -155,7 +221,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.translog.earliest_last_modified.time", "Earliest last modified age for the transaction log.", "time", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getEarliestLastModifiedAge()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getTranslog()) + .map(o -> o.getEarliestLastModifiedAge()) + .orElse(0L) + ) ) ); @@ -164,7 +236,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.transport.rx.size", "Size, in bytes, of RX packets received by the node during internal cluster communication.", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getRxSize().getBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getTransport()) + .map(o -> o.getRxSize()) + .map(o -> o.getBytes()) + .orElse(0L) + ) ) ); @@ -173,7 +251,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.transport.tx.size", "Size, in bytes, of TX packets sent by the node during internal cluster communication.", "bytes", - () -> new 
LongWithAttributes(stats.getOrRefresh().getTransport().getTxSize().getBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getTransport()) + .map(o -> o.getTxSize()) + .map(o -> o.getBytes()) + .orElse(0L) + ) ) ); @@ -182,7 +266,9 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.jvm.mem.pools.young.size", "Memory, in bytes, used by the young generation heap.", "bytes", - () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.YOUNG)) + () -> new LongWithAttributes( + bytesUsedByGCGen(Optional.ofNullable(stats.getOrRefresh()).map(o -> o.getJvm()).map(o -> o.getMem()), GcNames.YOUNG) + ) ) ); @@ -191,7 +277,9 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.jvm.mem.pools.survivor.size", "Memory, in bytes, used by the survivor space.", "bytes", - () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.SURVIVOR)) + () -> new LongWithAttributes( + bytesUsedByGCGen(Optional.ofNullable(stats.getOrRefresh()).map(o -> o.getJvm()).map(o -> o.getMem()), GcNames.SURVIVOR) + ) ) ); @@ -200,7 +288,9 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.jvm.mem.pools.old.size", "Memory, in bytes, used by the old generation heap.", "bytes", - () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.OLD)) + () -> new LongWithAttributes( + bytesUsedByGCGen(Optional.ofNullable(stats.getOrRefresh()).map(o -> o.getJvm()).map(o -> o.getMem()), GcNames.OLD) + ) ) ); @@ -209,7 +299,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.fs.io_stats.time.total", "The total time in millis spent performing I/O operations across all devices used by Elasticsearch.", "milliseconds", - () -> new LongWithAttributes(stats.getOrRefresh().getFs().getIoStats().getTotalIOTimeMillis()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getFs()) 
+ .map(o -> o.getIoStats()) + .map(o -> o.getTotalIOTimeMillis()) + .orElse(0L) + ) ) ); @@ -218,61 +314,112 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.docs.total", "Total number of indexed documents", "documents", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexCount()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> o.getTotal()) + .map(o -> o.getIndexCount()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongGauge( - "es.indexing.docs.total", + "es.indexing.docs.current.total", "Current number of indexing documents", "documents", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexCurrent()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> o.getTotal()) + .map(o -> o.getIndexCurrent()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongAsyncCounter( - "es.indices.indexing.failed.total", + "es.indexing.indexing.failed.total", "Total number of failed indexing operations", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexFailedCount()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> o.getTotal()) + .map(o -> o.getIndexFailedCount()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongAsyncCounter( - "es.indices.deletion.docs.total", + "es.indexing.deletion.docs.total", "Total number of deleted documents", "documents", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteCount()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> 
o.getTotal()) + .map(o -> o.getDeleteCount()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongGauge( - "es.indices.deletion.docs.total", + "es.indexing.deletion.docs.current.total", "Current number of deleting documents", "documents", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteCurrent()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> o.getTotal()) + .map(o -> o.getDeleteCurrent()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongAsyncCounter( - "es.indices.indexing.time", + "es.indexing.time", "Total indices indexing time", "milliseconds", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexTime().millis()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> o.getTotal()) + .map(o -> o.getIndexTime()) + .map(o -> o.millis()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongAsyncCounter( - "es.indices.deletion.time", + "es.deletion.time", "Total indices deletion time", "milliseconds", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteTime().millis()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> o.getTotal()) + .map(o -> o.getDeleteTime()) + .map(o -> o.millis()) + .orElse(0L) + ) ) ); @@ -281,7 +428,15 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indices.throttle.time", "Total indices throttle time", "milliseconds", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getThrottleTime().millis()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> 
o.getTotal()) + .map(o -> o.getThrottleTime()) + .map(o -> o.millis()) + .orElse(0L) + ) ) ); @@ -290,7 +445,14 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indices.noop.total", "Total number of noop shard operations", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getNoopUpdateCount()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndices()) + .map(o -> o.getIndexing()) + .map(o -> o.getTotal()) + .map(o -> o.getNoopUpdateCount()) + .orElse(0L) + ) ) ); @@ -299,7 +461,12 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.coordinating_operations.size", "Total number of memory bytes consumed by coordinating operations", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalCoordinatingBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getTotalCoordinatingBytes()) + .orElse(0L) + ) ) ); @@ -308,25 +475,40 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.coordinating_operations.total", "Total number of coordinating operations", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalCoordinatingOps()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getTotalCoordinatingOps()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongGauge( - "es.indexing.coordinating_operations.size", + "es.indexing.coordinating_operations.current.size", "Current number of memory bytes consumed by coordinating operations", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentCoordinatingBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) 
+ .map(o -> o.getCurrentCoordinatingBytes()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongGauge( - "es.indexing.coordinating_operations.total", + "es.indexing.coordinating_operations.current.total", "Current number of coordinating operations", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentCoordinatingOps()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getCurrentCoordinatingOps()) + .orElse(0L) + ) ) ); @@ -335,7 +517,12 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.coordinating_operations.rejections.total", "Total number of coordinating operations rejections", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCoordinatingRejections()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getCoordinatingRejections()) + .orElse(0L) + ) ) ); @@ -344,7 +531,12 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.primary_operations.size", "Total number of memory bytes consumed by primary operations", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalPrimaryBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getTotalPrimaryBytes()) + .orElse(0L) + ) ) ); @@ -353,25 +545,40 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.primary_operations.total", "Total number of primary operations", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalPrimaryOps()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getTotalPrimaryOps()) + .orElse(0L) + ) ) ); metrics.add( 
registry.registerLongGauge( - "es.indexing.primary_operations.size", + "es.indexing.primary_operations.current.size", "Current number of memory bytes consumed by primary operations", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentPrimaryBytes()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getCurrentPrimaryBytes()) + .orElse(0L) + ) ) ); metrics.add( registry.registerLongGauge( - "es.indexing.primary_operations.total", + "es.indexing.primary_operations.current.total", "Current number of primary operations", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentPrimaryOps()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getCurrentPrimaryOps()) + .orElse(0L) + ) ) ); @@ -380,7 +587,12 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.primary_operations.rejections.total", "Total number of primary operations rejections", "operations", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getPrimaryRejections()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(o -> o.getIndexingPressureStats()) + .map(o -> o.getPrimaryRejections()) + .orElse(0L) + ) ) ); @@ -389,7 +601,9 @@ private void registerAsyncMetrics(MeterRegistry registry) { "es.indexing.memory.limit.size", "Current memory limit for primary and coordinating operations", "bytes", - () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getMemoryLimit()) + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()).map(o -> o.getIndexingPressureStats()).map(o -> o.getMemoryLimit()).orElse(0L) + ) ) ); @@ -398,18 +612,20 @@ private void registerAsyncMetrics(MeterRegistry registry) { /** * Retrieves the bytes used by a specific 
garbage collection generation from the provided JvmStats.Mem. * - * @param mem The JvmStats.Mem containing memory pool information. - * @param name The name of the garbage collection generation (e.g., "young", "survivor", "old"). + * @param optionalMem The JvmStats.Mem containing memory pool information. + * @param name The name of the garbage collection generation (e.g., "young", "survivor", "old"). * @return The number of bytes used by the specified garbage collection generation. */ - private long bytesUsedByGCGen(JvmStats.Mem mem, String name) { - long bytesUsed = 0; - for (JvmStats.MemoryPool pool : mem) { - if (pool.getName().equals(name)) { - bytesUsed = pool.getUsed().getBytes(); + private long bytesUsedByGCGen(Optional optionalMem, String name) { + return optionalMem.map(mem -> { + long bytesUsed = 0; + for (JvmStats.MemoryPool pool : mem) { + if (pool.getName().equals(name)) { + bytesUsed = pool.getUsed().getBytes(); + } } - } - return bytesUsed; + return bytesUsed; + }).orElse(0L); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 018abebdb7709..1dae328752bdc 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -78,6 +78,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; @@ -183,6 +184,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ExecutorBuilder; 
import org.elasticsearch.threadpool.ThreadPool; @@ -241,8 +243,8 @@ static NodeConstruction prepareConstruction( NodeConstruction constructor = new NodeConstruction(closeables); Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider); - - ThreadPool threadPool = constructor.createThreadPool(settings); + TelemetryProvider telemetryProvider = constructor.createTelemetryProvider(settings); + ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); SettingsModule settingsModule = constructor.validateSettings(initialEnvironment.settings(), settings, threadPool); SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool); @@ -257,7 +259,8 @@ static NodeConstruction prepareConstruction( scriptService, constructor.createAnalysisRegistry(), serviceProvider, - forbidPrivateIndexSettings + forbidPrivateIndexSettings, + telemetryProvider ); return constructor; @@ -448,9 +451,14 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr return settings; } - private ThreadPool createThreadPool(Settings settings) throws IOException { + private TelemetryProvider createTelemetryProvider(Settings settings) { + return getSinglePlugin(TelemetryPlugin.class).map(p -> p.getTelemetryProvider(settings)).orElse(TelemetryProvider.NOOP); + } + + private ThreadPool createThreadPool(Settings settings, MeterRegistry meterRegistry) throws IOException { ThreadPool threadPool = new ThreadPool( settings, + meterRegistry, pluginsService.flatMap(p -> p.getExecutorBuilders(settings)).toArray(ExecutorBuilder[]::new) ); resourcesToClose.add(() -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS)); @@ -580,13 +588,12 @@ private void construct( ScriptService scriptService, AnalysisRegistry analysisRegistry, NodeServiceProvider serviceProvider, - boolean forbidPrivateIndexSettings + boolean forbidPrivateIndexSettings, + TelemetryProvider telemetryProvider ) 
throws IOException { Settings settings = settingsModule.getSettings(); - TelemetryProvider telemetryProvider = getSinglePlugin(TelemetryPlugin.class).map(p -> p.getTelemetryProvider(settings)) - .orElse(TelemetryProvider.NOOP); modules.bindToInstance(Tracer.class, telemetryProvider.getTracer()); TaskManager taskManager = new TaskManager( @@ -598,6 +605,7 @@ private void construct( ).collect(Collectors.toSet()), telemetryProvider.getTracer() ); + final Tracer tracer = telemetryProvider.getTracer(); ClusterService clusterService = createClusterService(settingsModule, threadPool, taskManager); clusterService.addStateApplier(scriptService); @@ -796,6 +804,7 @@ record PluginServiceInstances( ActionModule actionModule = new ActionModule( settings, clusterModule.getIndexNameExpressionResolver(), + namedWriteableRegistry, settingsModule.getIndexScopedSettings(), settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), @@ -965,7 +974,8 @@ record PluginServiceInstances( repositoryService ); - final NodeMetrics nodeMetrics = new NodeMetrics(telemetryProvider.getMeterRegistry(), nodeService); + final TimeValue metricsInterval = settings.getAsTime("tracing.apm.agent.metrics_interval", TimeValue.timeValueSeconds(10)); + final NodeMetrics nodeMetrics = new NodeMetrics(telemetryProvider.getMeterRegistry(), nodeService, metricsInterval); final SearchService searchService = serviceProvider.newSearchService( pluginsService, @@ -1249,8 +1259,7 @@ private void postInjection( transportService.getTaskManager(), () -> clusterService.localNode().getId(), transportService.getLocalNodeConnection(), - transportService.getRemoteClusterService(), - namedWriteableRegistry + transportService.getRemoteClusterService() ); logger.debug("initializing HTTP handlers ..."); diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index 
7af206a12ecc9..44e86e056ef3b 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -40,7 +40,7 @@ public class CompletionPersistentTaskAction extends ActionType { diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java index 32188d55e418a..63c97685c913e 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.common.Strings; @@ -300,9 +300,9 @@ private void cancelTask(Long allocationId) { if (task.markAsCancelled()) { // Cancel the local task using the task manager String reason = "task has been removed, cancelling locally"; - persistentTasksService.sendCancelRequest(task.getId(), reason, new ActionListener() { + persistentTasksService.sendCancelRequest(task.getId(), reason, new ActionListener<>() { @Override - public void onResponse(CancelTasksResponse cancelTasksResponse) { + public void onResponse(ListTasksResponse cancelTasksResponse) { logger.trace( "Persistent task [{}] with id [{}] and allocation id [{}] was cancelled", task.getAction(), diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java index 
869a93110d257..227569341919a 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; @@ -90,7 +90,7 @@ public void sendCompletionRequest( /** * Cancels a locally running task using the Task Manager API */ - void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { + void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { CancelTasksRequest request = new CancelTasksRequest(); request.setTargetTaskId(new TaskId(clusterService.localNode().getId(), taskId)); request.setReason(reason); diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index 8e0ee8f87422e..1fbdd03dcc268 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -34,7 +34,7 @@ public class RemovePersistentTaskAction extends ActionType { diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index d98abdffaf463..7dbb458354752 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ 
b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -40,7 +40,7 @@ public class StartPersistentTaskAction extends ActionType { diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index f961a9fffec27..dcf86f85eb709 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -36,7 +36,7 @@ public class UpdatePersistentTaskStatusAction extends ActionType { diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index 095eec2811edc..18e21094fc11d 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -28,7 +29,6 @@ import java.util.Collection; import java.util.Collections; -import java.util.List; import java.util.Objects; import java.util.function.Supplier; @@ -37,7 +37,7 @@ *

    {@code
      *   {@literal @}Override
      *   public List> getActions() {
    - *       return Arrays.asList(new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class),
    + *       return List.of(new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class),
      *               new ActionHandler<>(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class),
      *               new ActionHandler<>(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class),
      *               new ActionHandler<>(RethrottleAction.INSTANCE, TransportRethrottleAction.class));
    @@ -48,22 +48,23 @@ public interface ActionPlugin {
         /**
          * Actions added by this plugin.
          */
    -    default List> getActions() {
    +    default Collection> getActions() {
             return Collections.emptyList();
         }
     
         /**
          * ActionType filters added by this plugin.
          */
    -    default List getActionFilters() {
    +    default Collection getActionFilters() {
             return Collections.emptyList();
         }
     
         /**
          * Rest handlers added by this plugin.
          */
    -    default List getRestHandlers(
    +    default Collection getRestHandlers(
             Settings settings,
    +        NamedWriteableRegistry namedWriteableRegistry,
             RestController restController,
             ClusterSettings clusterSettings,
             IndexScopedSettings indexScopedSettings,
    diff --git a/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java
    index 35badffe0b3aa..44653dcf8b5fe 100644
    --- a/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java
    +++ b/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java
    @@ -14,7 +14,7 @@
     import org.elasticsearch.indices.breaker.CircuitBreakerService;
     import org.elasticsearch.plugins.ActionPlugin;
     import org.elasticsearch.rest.RestController;
    -import org.elasticsearch.rest.RestHandler;
    +import org.elasticsearch.rest.RestInterceptor;
     import org.elasticsearch.telemetry.tracing.Tracer;
     import org.elasticsearch.usage.UsageService;
     
    @@ -46,7 +46,7 @@ public interface RestServerActionPlugin extends ActionPlugin {
          *
          * Note: Only one installed plugin may implement a rest interceptor.
          */
    -    UnaryOperator getRestHandlerInterceptor(ThreadContext threadContext);
    +    RestInterceptor getRestHandlerInterceptor(ThreadContext threadContext);
     
         /**
          * Returns a replacement {@link RestController} to be used in the server.
    @@ -54,7 +54,7 @@ public interface RestServerActionPlugin extends ActionPlugin {
          */
         @Nullable
         default RestController getRestController(
    -        @Nullable UnaryOperator handlerWrapper,
    +        @Nullable RestInterceptor interceptor,
             NodeClient client,
             CircuitBreakerService circuitBreakerService,
             UsageService usageService,
    diff --git a/server/src/main/java/org/elasticsearch/repositories/FinalizeSnapshotContext.java b/server/src/main/java/org/elasticsearch/repositories/FinalizeSnapshotContext.java
    index 9129f6abd373c..b459e1cfc7338 100644
    --- a/server/src/main/java/org/elasticsearch/repositories/FinalizeSnapshotContext.java
    +++ b/server/src/main/java/org/elasticsearch/repositories/FinalizeSnapshotContext.java
    @@ -100,7 +100,7 @@ public Map> obsoleteShardGenerations() {
         }
     
         public ClusterState updatedClusterState(ClusterState state) {
    -        final ClusterState updatedState = SnapshotsService.stateWithoutSnapshot(state, snapshotInfo.snapshot());
    +        final ClusterState updatedState = SnapshotsService.stateWithoutSnapshot(state, snapshotInfo.snapshot(), updatedShardGenerations);
             obsoleteGenerations.set(
                 SnapshotsInProgress.get(updatedState).obsoleteGenerations(snapshotInfo.repository(), SnapshotsInProgress.get(state))
             );
    diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java
    index 722779a646824..b9cce9e3ec500 100644
    --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java
    +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java
    @@ -28,7 +28,7 @@ public class RepositoriesStats implements Writeable, ToXContentFragment {
         private final Map repositoryThrottlingStats;
     
         public RepositoriesStats(StreamInput in) throws IOException {
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 repositoryThrottlingStats = in.readMap(ThrottlingStats::new);
             } else {
                 repositoryThrottlingStats = new HashMap<>();
    @@ -41,7 +41,7 @@ public RepositoriesStats(Map repositoryThrottlingStats)
     
         @Override
         public void writeTo(StreamOutput out) throws IOException {
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 out.writeMap(repositoryThrottlingStats, StreamOutput::writeWriteable);
             }
         }
    diff --git a/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java b/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java
    index e42552d3e5f3c..4c34f2e192a26 100644
    --- a/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java
    +++ b/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java
    @@ -142,6 +142,11 @@ public ShardGeneration getShardGen(IndexId indexId, int shardId) {
             return generations.get(shardId);
         }
     
    +    public boolean hasShardGen(RepositoryShardId repositoryShardId) {
    +        final var indexShardGens = getGens(repositoryShardId.index());
    +        return repositoryShardId.shardId() < indexShardGens.size() && indexShardGens.get(repositoryShardId.shardId()) != null;
    +    }
    +
         public List getGens(IndexId indexId) {
             return shardGenerations.getOrDefault(indexId, Collections.emptyList());
         }
    diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
    index d940415c38916..bc4e0b3167f1b 100644
    --- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
    +++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
    @@ -93,7 +93,7 @@ public void verify(String repository, String verificationToken, final ActionList
                         new VerifyNodeRepositoryRequest(repository, verificationToken),
                         new TransportResponseHandler.Empty() {
                             @Override
    -                        public Executor executor(ThreadPool threadPool) {
    +                        public Executor executor() {
                                 return TransportResponseHandler.TRANSPORT_WORKER;
                             }
     
    diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
    index 48caafc6bfab8..b8b0498d95125 100644
    --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
    +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
    @@ -1741,6 +1741,7 @@ private void cleanupOldMetadata(
                         (indexId, gens) -> gens.forEach(
                             (shardId, oldGen) -> toDelete.add(
                                 shardPath(indexId, shardId).buildAsString().substring(prefixPathLen) + INDEX_FILE_PREFIX + oldGen
    +                                .toBlobNamePart()
                             )
                         )
                     );
    diff --git a/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBody.java b/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBody.java
    index ae267573b4cab..5c41be0fc9f9f 100644
    --- a/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBody.java
    +++ b/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBody.java
    @@ -15,8 +15,6 @@
     import org.elasticsearch.common.xcontent.ChunkedToXContent;
     import org.elasticsearch.core.CheckedConsumer;
     import org.elasticsearch.core.IOUtils;
    -import org.elasticsearch.core.Nullable;
    -import org.elasticsearch.core.Releasable;
     import org.elasticsearch.core.Releasables;
     import org.elasticsearch.core.RestApiVersion;
     import org.elasticsearch.core.Streams;
    @@ -36,7 +34,7 @@
      * The body of a rest response that uses chunked HTTP encoding. Implementations are used to avoid materializing full responses on heap and
      * instead serialize only as much of the response as can be flushed to the network right away.
      */
    -public interface ChunkedRestResponseBody extends Releasable {
    +public interface ChunkedRestResponseBody {
     
         Logger logger = LogManager.getLogger(ChunkedRestResponseBody.class);
     
    @@ -67,15 +65,10 @@ public interface ChunkedRestResponseBody extends Releasable {
          * @param chunkedToXContent chunked x-content instance to serialize
          * @param params parameters to use for serialization
          * @param channel channel the response will be written to
    -     * @param releasable resource to release when the response is fully sent, or {@code null} if nothing to release
          * @return chunked rest response body
          */
    -    static ChunkedRestResponseBody fromXContent(
    -        ChunkedToXContent chunkedToXContent,
    -        ToXContent.Params params,
    -        RestChannel channel,
    -        @Nullable Releasable releasable
    -    ) throws IOException {
    +    static ChunkedRestResponseBody fromXContent(ChunkedToXContent chunkedToXContent, ToXContent.Params params, RestChannel channel)
    +        throws IOException {
     
             return new ChunkedRestResponseBody() {
     
    @@ -146,23 +139,14 @@ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler rec
                 public String getResponseContentTypeString() {
                     return builder.getResponseContentTypeString();
                 }
    -
    -            @Override
    -            public void close() {
    -                Releasables.closeExpectNoException(releasable);
    -            }
             };
         }
     
         /**
          * Create a chunked response body to be written to a specific {@link RestChannel} from a stream of text chunks, each represented as a
    -     * consumer of a {@link Writer}. The last chunk that the iterator yields must write at least one byte.
    +     * consumer of a {@link Writer}.
          */
    -    static ChunkedRestResponseBody fromTextChunks(
    -        String contentType,
    -        Iterator> chunkIterator,
    -        @Nullable Releasable releasable
    -    ) {
    +    static ChunkedRestResponseBody fromTextChunks(String contentType, Iterator> chunkIterator) {
             return new ChunkedRestResponseBody() {
                 private RecyclerBytesStreamOutput currentOutput;
                 private final Writer writer = new OutputStreamWriter(new OutputStream() {
    @@ -235,11 +219,6 @@ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler rec
                 public String getResponseContentTypeString() {
                     return contentType;
                 }
    -
    -            @Override
    -            public void close() {
    -                Releasables.closeExpectNoException(releasable);
    -            }
             };
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/rest/LoggingChunkedRestResponseBody.java b/server/src/main/java/org/elasticsearch/rest/LoggingChunkedRestResponseBody.java
    index 00b56d0e05051..0508828c70da1 100644
    --- a/server/src/main/java/org/elasticsearch/rest/LoggingChunkedRestResponseBody.java
    +++ b/server/src/main/java/org/elasticsearch/rest/LoggingChunkedRestResponseBody.java
    @@ -46,9 +46,4 @@ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler rec
         public String getResponseContentTypeString() {
             return inner.getResponseContentTypeString();
         }
    -
    -    @Override
    -    public void close() {
    -        inner.close();
    -    }
     }
    diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java
    index 6a5d6f99df64b..2ebee9c59482e 100644
    --- a/server/src/main/java/org/elasticsearch/rest/RestController.java
    +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java
    @@ -13,6 +13,8 @@
     import org.apache.logging.log4j.Logger;
     import org.apache.lucene.util.BytesRef;
     import org.elasticsearch.ElasticsearchException;
    +import org.elasticsearch.ElasticsearchStatusException;
    +import org.elasticsearch.action.ActionListener;
     import org.elasticsearch.client.internal.node.NodeClient;
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.breaker.CircuitBreaker;
    @@ -24,9 +26,10 @@
     import org.elasticsearch.common.path.PathTrie;
     import org.elasticsearch.common.recycler.Recycler;
     import org.elasticsearch.common.util.Maps;
    -import org.elasticsearch.common.util.concurrent.RunOnce;
     import org.elasticsearch.common.util.concurrent.ThreadContext;
     import org.elasticsearch.core.Nullable;
    +import org.elasticsearch.core.Releasable;
    +import org.elasticsearch.core.Releasables;
     import org.elasticsearch.core.RestApiVersion;
     import org.elasticsearch.core.Streams;
     import org.elasticsearch.core.TimeValue;
    @@ -37,6 +40,7 @@
     import org.elasticsearch.rest.RestHandler.Route;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.telemetry.tracing.Tracer;
    +import org.elasticsearch.transport.Transports;
     import org.elasticsearch.usage.SearchUsageHolder;
     import org.elasticsearch.usage.UsageService;
     import org.elasticsearch.xcontent.XContentBuilder;
    @@ -56,8 +60,8 @@
     import java.util.SortedMap;
     import java.util.TreeMap;
     import java.util.concurrent.atomic.AtomicBoolean;
    +import java.util.concurrent.atomic.AtomicReference;
     import java.util.function.Supplier;
    -import java.util.function.UnaryOperator;
     
     import static org.elasticsearch.indices.SystemIndices.EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY;
     import static org.elasticsearch.indices.SystemIndices.SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY;
    @@ -95,7 +99,7 @@ public class RestController implements HttpServerTransport.Dispatcher {
     
         private final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER);
     
    -    private final UnaryOperator handlerWrapper;
    +    private final RestInterceptor interceptor;
     
         private final NodeClient client;
     
    @@ -107,7 +111,7 @@ public class RestController implements HttpServerTransport.Dispatcher {
         private final ServerlessApiProtections apiProtections;
     
         public RestController(
    -        UnaryOperator handlerWrapper,
    +        RestInterceptor restInterceptor,
             NodeClient client,
             CircuitBreakerService circuitBreakerService,
             UsageService usageService,
    @@ -115,10 +119,10 @@ public RestController(
         ) {
             this.usageService = usageService;
             this.tracer = tracer;
    -        if (handlerWrapper == null) {
    -            handlerWrapper = h -> h; // passthrough if no wrapper set
    +        if (restInterceptor == null) {
    +            restInterceptor = (request, channel, targetHandler, listener) -> listener.onResponse(Boolean.TRUE);
             }
    -        this.handlerWrapper = handlerWrapper;
    +        this.interceptor = restInterceptor;
             this.client = client;
             this.circuitBreakerService = circuitBreakerService;
             registerHandlerNoWrap(RestRequest.Method.GET, "/favicon.ico", RestApiVersion.current(), new RestFavIconHandler());
    @@ -264,7 +268,7 @@ protected void registerHandler(RestRequest.Method method, String path, RestApiVe
             if (handler instanceof BaseRestHandler) {
                 usageService.addRestHandler((BaseRestHandler) handler);
             }
    -        registerHandlerNoWrap(method, path, version, handlerWrapper.apply(handler));
    +        registerHandlerNoWrap(method, path, version, handler);
         }
     
         private void registerHandlerNoWrap(RestRequest.Method method, String path, RestApiVersion version, RestHandler handler) {
    @@ -325,7 +329,7 @@ public void dispatchRequest(RestRequest request, RestChannel channel, ThreadCont
                 tryAllHandlers(request, channel, threadContext);
             } catch (Exception e) {
                 try {
    -                channel.sendResponse(new RestResponse(channel, e));
    +                sendFailure(channel, e);
                 } catch (Exception inner) {
                     inner.addSuppressed(e);
                     logger.error(() -> "failed to send failure response for uri [" + request.uri() + "]", inner);
    @@ -348,7 +352,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th
                 // unless it's a http headers validation error, we consider any exceptions encountered so far during request processing
                 // to be a problem of invalid/malformed request (hence the RestStatus#BAD_REQEST (400) HTTP response code)
                 if (e instanceof HttpHeadersValidationException) {
    -                channel.sendResponse(new RestResponse(channel, (Exception) e.getCause()));
    +                sendFailure(channel, (Exception) e.getCause());
                 } else {
                     channel.sendResponse(new RestResponse(channel, BAD_REQUEST, e));
                 }
    @@ -438,12 +442,44 @@ private void dispatchRequest(
                 } else {
                     threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.TRUE.toString());
                 }
    -            handler.handleRequest(request, responseChannel, client);
    +            final var finalChannel = responseChannel;
    +            this.interceptor.intercept(request, responseChannel, handler.getConcreteRestHandler(), new ActionListener<>() {
    +                @Override
    +                public void onResponse(Boolean processRequest) {
    +                    if (processRequest) {
    +                        try {
    +                            validateRequest(request, handler, client);
    +                            handler.handleRequest(request, finalChannel, client);
    +                        } catch (Exception e) {
    +                            onFailure(e);
    +                        }
    +                    }
    +                }
    +
    +                @Override
    +                public void onFailure(Exception e) {
    +                    try {
    +                        sendFailure(finalChannel, e);
    +                    } catch (IOException ex) {
    +                        logger.info("Failed to send error [{}] to HTTP client", ex.toString());
    +                    }
    +                }
    +            });
             } catch (Exception e) {
    -            responseChannel.sendResponse(new RestResponse(responseChannel, e));
    +            sendFailure(responseChannel, e);
             }
         }
     
    +    /**
    +     * Validates that the request should be allowed. Throws an exception if the request should be rejected.
    +     */
    +    @SuppressWarnings("unused")
    +    protected void validateRequest(RestRequest request, RestHandler handler, NodeClient client) throws ElasticsearchStatusException {}
    +
    +    private static void sendFailure(RestChannel responseChannel, Exception e) throws IOException {
    +        responseChannel.sendResponse(new RestResponse(responseChannel, e));
    +    }
    +
         /**
          * in order to prevent CSRF we have to reject all media types that are from a browser safelist
          * see https://fetch.spec.whatwg.org/#cors-safelisted-request-header
    @@ -691,7 +727,7 @@ public static void handleBadRequest(String uri, RestRequest.Method method, RestC
         public static void handleServerlessRequestToProtectedResource(String uri, RestRequest.Method method, RestChannel channel)
             throws IOException {
             String msg = "uri [" + uri + "] with method [" + method + "] exists but is not available when running in serverless mode";
    -        channel.sendResponse(new RestResponse(channel, new ApiNotAvailableException(msg)));
    +        sendFailure(channel, new ApiNotAvailableException(msg));
         }
     
         /**
    @@ -791,10 +827,18 @@ public void sendResponse(RestResponse response) {
                     if (response.isChunked() == false) {
                         methodHandlers.addResponseStats(response.content().length());
                     } else {
    +                    final var responseLengthRecorder = new ResponseLengthRecorder(methodHandlers);
    +                    final var headers = response.getHeaders();
                         response = RestResponse.chunked(
                             response.status(),
    -                        new EncodedLengthTrackingChunkedRestResponseBody(response.chunkedContent(), methodHandlers)
    +                        new EncodedLengthTrackingChunkedRestResponseBody(response.chunkedContent(), responseLengthRecorder),
    +                        Releasables.wrap(responseLengthRecorder, response)
                         );
    +                    for (final var header : headers.entrySet()) {
    +                        for (final var value : header.getValue()) {
    +                            response.addHeader(header.getKey(), value);
    +                        }
    +                    }
                     }
                     delegate.sendResponse(response);
                     success = true;
    @@ -818,15 +862,44 @@ private void close() {
             }
         }
     
    +    private static class ResponseLengthRecorder extends AtomicReference implements Releasable {
    +        private long responseLength;
    +
    +        private ResponseLengthRecorder(MethodHandlers methodHandlers) {
    +            super(methodHandlers);
    +        }
    +
    +        @Override
    +        public void close() {
    +            // closed just before sending the last chunk, and also when the whole RestResponse is closed since the client might abort the
    +            // connection before we send the last chunk, in which case we won't have recorded the response in the
    +            // stats yet; thus we need run-once semantics here:
    +            final var methodHandlers = getAndSet(null);
    +            if (methodHandlers != null) {
    +                // if we started sending chunks then we're closed on the transport worker, no need for sync
    +                assert responseLength == 0L || Transports.assertTransportThread();
    +                methodHandlers.addResponseStats(responseLength);
    +            }
    +        }
    +
    +        void addChunkLength(long chunkLength) {
    +            assert chunkLength >= 0L : chunkLength;
    +            assert Transports.assertTransportThread(); // always called on the transport worker, no need for sync
    +            responseLength += chunkLength;
    +        }
    +    }
    +
         private static class EncodedLengthTrackingChunkedRestResponseBody implements ChunkedRestResponseBody {
     
             private final ChunkedRestResponseBody delegate;
    -        private final RunOnce onCompletion;
    -        private long encodedLength = 0;
    +        private final ResponseLengthRecorder responseLengthRecorder;
     
    -        private EncodedLengthTrackingChunkedRestResponseBody(ChunkedRestResponseBody delegate, MethodHandlers methodHandlers) {
    +        private EncodedLengthTrackingChunkedRestResponseBody(
    +            ChunkedRestResponseBody delegate,
    +            ResponseLengthRecorder responseLengthRecorder
    +        ) {
                 this.delegate = delegate;
    -            this.onCompletion = new RunOnce(() -> methodHandlers.addResponseStats(encodedLength));
    +            this.responseLengthRecorder = responseLengthRecorder;
             }
     
             @Override
    @@ -837,9 +910,9 @@ public boolean isDone() {
             @Override
             public ReleasableBytesReference encodeChunk(int sizeHint, Recycler recycler) throws IOException {
                 final ReleasableBytesReference bytesReference = delegate.encodeChunk(sizeHint, recycler);
    -            encodedLength += bytesReference.length();
    +            responseLengthRecorder.addChunkLength(bytesReference.length());
                 if (isDone()) {
    -                onCompletion.run();
    +                responseLengthRecorder.close();
                 }
                 return bytesReference;
             }
    @@ -848,14 +921,6 @@ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler rec
             public String getResponseContentTypeString() {
                 return delegate.getResponseContentTypeString();
             }
    -
    -        @Override
    -        public void close() {
    -            delegate.close();
    -            // the client might close the connection before we send the last chunk, in which case we won't have recorded the response in the
    -            // stats yet, so we do it now:
    -            onCompletion.run();
    -        }
         }
     
         private static CircuitBreaker inFlightRequestsBreaker(CircuitBreakerService circuitBreakerService) {
    diff --git a/server/src/main/java/org/elasticsearch/rest/RestInterceptor.java b/server/src/main/java/org/elasticsearch/rest/RestInterceptor.java
    new file mode 100644
    index 0000000000000..dd0d444073040
    --- /dev/null
    +++ b/server/src/main/java/org/elasticsearch/rest/RestInterceptor.java
    @@ -0,0 +1,27 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.rest;
    +
    +import org.elasticsearch.action.ActionListener;
    +
    +/**
    + * Wraps the execution of a {@link RestHandler}
    + */
    +@FunctionalInterface
    +public interface RestInterceptor {
    +
    +    /**
    +     * @param listener The interceptor responds with {@code True} if the handler should be called,
    +     *                 or {@code False} if the request has been entirely handled by the interceptor.
    +     *                 In the case of {@link ActionListener#onFailure(Exception)}, the target handler
    +     *                 will not be called, the request will be treated as unhandled, and the regular
    +     *                 rest exception handling will be performed
    +     */
    +    void intercept(RestRequest request, RestChannel channel, RestHandler targetHandler, ActionListener listener) throws Exception;
    +}
    diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java
    index 55adc67bf18e6..a4a44a5a65561 100644
    --- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java
    +++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java
    @@ -12,16 +12,15 @@
     import org.apache.logging.log4j.Logger;
     import org.apache.logging.log4j.util.Supplier;
     import org.elasticsearch.ElasticsearchException;
    -import org.elasticsearch.ElasticsearchStatusException;
     import org.elasticsearch.ExceptionsHelper;
     import org.elasticsearch.common.bytes.BytesArray;
     import org.elasticsearch.common.bytes.BytesReference;
    -import org.elasticsearch.common.bytes.ReleasableBytesReference;
     import org.elasticsearch.common.util.Maps;
     import org.elasticsearch.core.Nullable;
    +import org.elasticsearch.core.Releasable;
    +import org.elasticsearch.core.Releasables;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
     
     import java.io.IOException;
     import java.util.ArrayList;
    @@ -33,14 +32,13 @@
     
     import static java.util.Collections.singletonMap;
     import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE;
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
     import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER;
     
    -public final class RestResponse {
    +public final class RestResponse implements Releasable {
     
         public static final String TEXT_CONTENT_TYPE = "text/plain; charset=UTF-8";
     
    -    private static final String STATUS = "status";
    +    static final String STATUS = "status";
     
         private static final Logger SUPPRESSED_ERROR_LOGGER = LogManager.getLogger("rest.suppressed");
     
    @@ -54,6 +52,9 @@ public final class RestResponse {
         private final String responseMediaType;
         private Map> customHeaders;
     
    +    @Nullable
    +    private final Releasable releasable;
    +
         /**
          * Creates a new response based on {@link XContentBuilder}.
          */
    @@ -76,18 +77,18 @@ public RestResponse(RestStatus status, String responseMediaType, String content)
         }
     
         public RestResponse(RestStatus status, String responseMediaType, BytesReference content) {
    -        this(status, responseMediaType, content, null);
    +        this(status, responseMediaType, content, null, null);
         }
     
    -    public static RestResponse chunked(RestStatus restStatus, ChunkedRestResponseBody content) {
    +    private RestResponse(RestStatus status, String responseMediaType, BytesReference content, @Nullable Releasable releasable) {
    +        this(status, responseMediaType, content, null, releasable);
    +    }
    +
    +    public static RestResponse chunked(RestStatus restStatus, ChunkedRestResponseBody content, @Nullable Releasable releasable) {
             if (content.isDone()) {
    -            return new RestResponse(
    -                restStatus,
    -                content.getResponseContentTypeString(),
    -                new ReleasableBytesReference(BytesArray.EMPTY, content)
    -            );
    +            return new RestResponse(restStatus, content.getResponseContentTypeString(), BytesArray.EMPTY, releasable);
             } else {
    -            return new RestResponse(restStatus, content.getResponseContentTypeString(), null, content);
    +            return new RestResponse(restStatus, content.getResponseContentTypeString(), null, content, releasable);
             }
         }
     
    @@ -98,12 +99,14 @@ private RestResponse(
             RestStatus status,
             String responseMediaType,
             @Nullable BytesReference content,
    -        @Nullable ChunkedRestResponseBody chunkedResponseBody
    +        @Nullable ChunkedRestResponseBody chunkedResponseBody,
    +        @Nullable Releasable releasable
         ) {
             this.status = status;
             this.content = content;
             this.responseMediaType = responseMediaType;
             this.chunkedResponseBody = chunkedResponseBody;
    +        this.releasable = releasable;
             assert (content == null) != (chunkedResponseBody == null);
         }
     
    @@ -122,7 +125,7 @@ public RestResponse(RestChannel channel, RestStatus status, Exception e) throws
                     channel.request().params(),
                     status.getStatus()
                 );
    -            if (status.getStatus() < 500) {
    +            if (status.getStatus() < 500 || ExceptionsHelper.isNodeOrShardUnavailableTypeException(e)) {
                     SUPPRESSED_ERROR_LOGGER.debug(messageSupplier, e);
                 } else {
                     SUPPRESSED_ERROR_LOGGER.warn(messageSupplier, e);
    @@ -145,6 +148,7 @@ public RestResponse(RestChannel channel, RestStatus status, Exception e) throws
                 copyHeaders(((ElasticsearchException) e));
             }
             this.chunkedResponseBody = null;
    +        this.releasable = null;
         }
     
         public String contentType() {
    @@ -189,42 +193,6 @@ static RestResponse createSimpleErrorResponse(RestChannel channel, RestStatus st
             );
         }
     
    -    public static ElasticsearchStatusException errorFromXContent(XContentParser parser) throws IOException {
    -        XContentParser.Token token = parser.nextToken();
    -        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
    -
    -        ElasticsearchException exception = null;
    -        RestStatus status = null;
    -
    -        String currentFieldName = null;
    -        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -            if (token == XContentParser.Token.FIELD_NAME) {
    -                currentFieldName = parser.currentName();
    -            }
    -            if (STATUS.equals(currentFieldName)) {
    -                if (token != XContentParser.Token.FIELD_NAME) {
    -                    ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
    -                    status = RestStatus.fromCode(parser.intValue());
    -                }
    -            } else {
    -                exception = ElasticsearchException.failureFromXContent(parser);
    -            }
    -        }
    -
    -        if (exception == null) {
    -            throw new IllegalStateException("Failed to parse elasticsearch status exception: no exception was found");
    -        }
    -
    -        ElasticsearchStatusException result = new ElasticsearchStatusException(exception.getMessage(), status, exception.getCause());
    -        for (String header : exception.getHeaderKeys()) {
    -            result.addHeader(header, exception.getHeader(header));
    -        }
    -        for (String metadata : exception.getMetadataKeys()) {
    -            result.addMetadata(metadata, exception.getMetadata(metadata));
    -        }
    -        return result;
    -    }
    -
         public void copyHeaders(ElasticsearchException ex) {
             Set headerKeySet = ex.getHeaderKeys();
             if (customHeaders == null) {
    @@ -263,4 +231,9 @@ public Map> filterHeaders(Map> headers
             }
             return headers;
         }
    +
    +    @Override
    +    public void close() {
    +        Releasables.closeExpectNoException(releasable);
    +    }
     }
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestChunkedToXContentListener.java b/server/src/main/java/org/elasticsearch/rest/action/RestChunkedToXContentListener.java
    index 2edb042ea23e8..3798f2b6b6fb1 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/RestChunkedToXContentListener.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/RestChunkedToXContentListener.java
    @@ -40,7 +40,8 @@ protected void processResponse(Response response) throws IOException {
             channel.sendResponse(
                 RestResponse.chunked(
                     getRestStatus(response),
    -                ChunkedRestResponseBody.fromXContent(response, params, channel, releasableFromResponse(response))
    +                ChunkedRestResponseBody.fromXContent(response, params, channel),
    +                releasableFromResponse(response)
                 )
             );
         }
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java
    index 27bcd82075f04..4ecd784ecd37c 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java
    @@ -9,6 +9,7 @@
     package org.elasticsearch.rest.action.admin.cluster;
     
     import org.elasticsearch.action.admin.cluster.desirednodes.TransportDeleteDesiredNodesAction;
    +import org.elasticsearch.action.support.master.AcknowledgedRequest;
     import org.elasticsearch.client.internal.node.NodeClient;
     import org.elasticsearch.rest.BaseRestHandler;
     import org.elasticsearch.rest.RestRequest;
    @@ -30,7 +31,7 @@ public List routes() {
     
         @Override
         protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
    -        final TransportDeleteDesiredNodesAction.Request deleteDesiredNodesRequest = new TransportDeleteDesiredNodesAction.Request();
    +        final AcknowledgedRequest.Plain deleteDesiredNodesRequest = new AcknowledgedRequest.Plain();
             deleteDesiredNodesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteDesiredNodesRequest.masterNodeTimeout()));
             return restChannel -> client.execute(
                 TransportDeleteDesiredNodesAction.TYPE,
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java
    index 2942e59aa1bfd..bc0750f16e0e7 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java
    @@ -117,7 +117,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
                 @Override
                 public RestResponse buildResponse(NodesHotThreadsResponse response) {
                     response.mustIncRef();
    -                return RestResponse.chunked(RestStatus.OK, fromTextChunks(TEXT_CONTENT_TYPE, response.getTextChunks(), response::decRef));
    +                return RestResponse.chunked(RestStatus.OK, fromTextChunks(TEXT_CONTENT_TYPE, response.getTextChunks()), response::decRef);
                 }
             });
         }
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java
    index 1f705f3530493..07c54fd258845 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java
    @@ -11,15 +11,30 @@
     import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction;
     import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest;
     import org.elasticsearch.client.internal.node.NodeClient;
    +import org.elasticsearch.cluster.metadata.DesiredNode;
    +import org.elasticsearch.common.logging.DeprecationLogger;
    +import org.elasticsearch.features.NodeFeature;
     import org.elasticsearch.rest.BaseRestHandler;
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.rest.action.RestToXContentListener;
    +import org.elasticsearch.xcontent.XContentParseException;
     import org.elasticsearch.xcontent.XContentParser;
     
     import java.io.IOException;
     import java.util.List;
    +import java.util.function.Predicate;
     
     public class RestUpdateDesiredNodesAction extends BaseRestHandler {
    +
    +    private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestUpdateDesiredNodesAction.class);
    +    private static final String VERSION_DEPRECATION_MESSAGE =
    +        "[version removal] Specifying node_version in desired nodes requests is deprecated.";
    +    private final Predicate clusterSupportsFeature;
    +
    +    public RestUpdateDesiredNodesAction(Predicate clusterSupportsFeature) {
    +        this.clusterSupportsFeature = clusterSupportsFeature;
    +    }
    +
         @Override
         public String getName() {
             return "update_desired_nodes";
    @@ -41,6 +56,16 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli
                 updateDesiredNodesRequest = UpdateDesiredNodesRequest.fromXContent(historyId, version, dryRun, parser);
             }
     
    +        if (clusterSupportsFeature.test(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED)) {
    +            if (updateDesiredNodesRequest.getNodes().stream().anyMatch(DesiredNode::hasVersion)) {
    +                deprecationLogger.compatibleCritical("desired_nodes_version", VERSION_DEPRECATION_MESSAGE);
    +            }
    +        } else {
    +            if (updateDesiredNodesRequest.getNodes().stream().anyMatch(n -> n.hasVersion() == false)) {
    +                throw new XContentParseException("[node_version] field is required and must have a valid value");
    +            }
    +        }
    +
             updateDesiredNodesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", updateDesiredNodesRequest.masterNodeTimeout()));
             return restChannel -> client.execute(
                 UpdateDesiredNodesAction.INSTANCE,
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java
    index 4c9ac8fcb9a3c..815c3ce7e2c33 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java
    @@ -11,9 +11,9 @@
     import org.elasticsearch.action.ActionRequestValidationException;
     import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
     import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
    -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
     import org.elasticsearch.action.support.IndicesOptions;
     import org.elasticsearch.action.support.SubscribableListener;
    +import org.elasticsearch.action.support.broadcast.BroadcastResponse;
     import org.elasticsearch.client.internal.node.NodeClient;
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.rest.BaseRestHandler;
    @@ -65,7 +65,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
                 if (validationException != null) {
                     throw validationException;
                 }
    -            final var responseListener = new SubscribableListener();
    +            final var responseListener = new SubscribableListener();
                 final var task = client.executeLocally(ForceMergeAction.INSTANCE, mergeRequest, responseListener);
                 responseListener.addListener(new LoggingTaskListener<>(task));
                 return sendTask(client.getLocalNodeId(), task);
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java
    index cf238d57c4cab..97964b09593f5 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java
    @@ -9,8 +9,8 @@
     package org.elasticsearch.rest.action.admin.indices;
     
     import org.elasticsearch.action.admin.indices.flush.FlushRequest;
    -import org.elasticsearch.action.admin.indices.flush.FlushResponse;
     import org.elasticsearch.action.support.IndicesOptions;
    +import org.elasticsearch.action.support.broadcast.BroadcastResponse;
     import org.elasticsearch.client.internal.node.NodeClient;
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.core.RestApiVersion;
    @@ -55,14 +55,14 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
             return channel -> client.admin().indices().flush(flushRequest, new SimulateSyncedFlushResponseListener(channel));
         }
     
    -    static final class SimulateSyncedFlushResponseListener extends RestBuilderListener {
    +    static final class SimulateSyncedFlushResponseListener extends RestBuilderListener {
     
             SimulateSyncedFlushResponseListener(RestChannel channel) {
                 super(channel);
             }
     
             @Override
    -        public RestResponse buildResponse(FlushResponse flushResponse, XContentBuilder builder) throws Exception {
    +        public RestResponse buildResponse(BroadcastResponse flushResponse, XContentBuilder builder) throws Exception {
                 builder.startObject();
                 buildSyncedFlushResponse(builder, flushResponse);
                 builder.endObject();
    @@ -70,7 +70,7 @@ public RestResponse buildResponse(FlushResponse flushResponse, XContentBuilder b
                 return new RestResponse(restStatus, builder);
             }
     
    -        private static void buildSyncedFlushResponse(XContentBuilder builder, FlushResponse flushResponse) throws IOException {
    +        private static void buildSyncedFlushResponse(XContentBuilder builder, BroadcastResponse flushResponse) throws IOException {
                 builder.startObject("_shards");
                 builder.field("total", flushResponse.getTotalShards());
                 builder.field("successful", flushResponse.getSuccessfulShards());
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java
    index 8ce001f7a1a77..6845fec4db6fe 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java
    @@ -77,9 +77,9 @@ public static RestResponse buildXContentBuilder(Table table, RestChannel channel
                         Iterators.single((builder, params) -> builder.endArray())
                     ),
                     ToXContent.EMPTY_PARAMS,
    -                channel,
    -                null
    -            )
    +                channel
    +            ),
    +            null
             );
         }
     
    @@ -127,9 +127,9 @@ public static RestResponse buildTextPlainResponse(Table table, RestChannel chann
                             }
                             writer.append("\n");
                         })
    -                ),
    -                null
    -            )
    +                )
    +            ),
    +            null
             );
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java
    index 7bfac46495b23..0bb97b1f51ff5 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java
    @@ -19,7 +19,7 @@
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.rest.Scope;
     import org.elasticsearch.rest.ServerlessScope;
    -import org.elasticsearch.rest.action.RestToXContentListener;
    +import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener;
     import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
     
     import java.io.IOException;
    @@ -80,6 +80,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
                 bulkRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards));
             }
             Boolean defaultRequireAlias = request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, false);
    +        boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false);
             bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
             bulkRequest.setRefreshPolicy(request.param("refresh"));
             bulkRequest.add(
    @@ -89,13 +90,14 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
                 defaultFetchSourceContext,
                 defaultPipeline,
                 defaultRequireAlias,
    +            defaultRequireDataStream,
                 defaultListExecutedPipelines,
                 allowExplicitIndex,
                 request.getXContentType(),
                 request.getRestApiVersion()
             );
     
    -        return channel -> client.bulk(bulkRequest, new RestToXContentListener<>(channel));
    +        return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel));
         }
     
         @Override
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java
    index fed7d8606ba01..cdda3ea38129f 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java
    @@ -133,6 +133,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
             indexRequest.setIfSeqNo(request.paramAsLong("if_seq_no", indexRequest.ifSeqNo()));
             indexRequest.setIfPrimaryTerm(request.paramAsLong("if_primary_term", indexRequest.ifPrimaryTerm()));
             indexRequest.setRequireAlias(request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, indexRequest.isRequireAlias()));
    +        indexRequest.setRequireDataStream(request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, indexRequest.isRequireDataStream()));
             String sOpType = request.param("op_type");
             String waitForActiveShards = request.param("wait_for_active_shards");
             if (waitForActiveShards != null) {
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/info/RestClusterInfoAction.java b/server/src/main/java/org/elasticsearch/rest/action/info/RestClusterInfoAction.java
    index 4300293a1336e..8be023bb4a182 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/info/RestClusterInfoAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/info/RestClusterInfoAction.java
    @@ -130,9 +130,9 @@ public RestResponse buildResponse(NodesStatsResponse response) throws Exception
                                     ChunkedToXContentHelper.endObject()
                                 ),
                                 EMPTY_PARAMS,
    -                            channel,
    -                            null
    -                        )
    +                            channel
    +                        ),
    +                        null
                         );
                     }
                 });
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
    index e0d9dd95206cf..d13c39f112878 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
    @@ -26,7 +26,7 @@
     import org.elasticsearch.rest.RestStatus;
     import org.elasticsearch.rest.Scope;
     import org.elasticsearch.rest.ServerlessScope;
    -import org.elasticsearch.rest.action.RestToXContentListener;
    +import org.elasticsearch.rest.action.RestBuilderListener;
     import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentBuilder;
    @@ -84,6 +84,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
                 defaultFetchSourceContext,
                 defaultPipeline,
                 null,
    +            null,
                 true,
                 true,
                 request.getXContentType(),
    @@ -140,7 +141,7 @@ static BytesReference convertToBulkRequestXContentBytes(Map sour
          * simulate-style xcontent.
          * Non-private for unit testing
          */
    -    static class SimulateIngestRestToXContentListener extends RestToXContentListener {
    +    static class SimulateIngestRestToXContentListener extends RestBuilderListener {
     
             SimulateIngestRestToXContentListener(RestChannel channel) {
                 super(channel);
    @@ -150,8 +151,7 @@ static class SimulateIngestRestToXContentListener extends RestToXContentListener
             public RestResponse buildResponse(BulkResponse response, XContentBuilder builder) throws Exception {
                 assert response.isFragment() == false;
                 toXContent(response, builder, channel.request());
    -            RestStatus restStatus = statusFunction.apply(response);
    -            return new RestResponse(restStatus, builder);
    +            return new RestResponse(RestStatus.OK, builder);
             }
     
             private static void toXContent(BulkResponse response, XContentBuilder builder, ToXContent.Params params) throws IOException {
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
    index a881b2497b26c..66e7f8cdcbc62 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
    @@ -49,10 +49,12 @@ public class RestMultiSearchAction extends BaseRestHandler {
     
         private final boolean allowExplicitIndex;
         private final SearchUsageHolder searchUsageHolder;
    +    private final NamedWriteableRegistry namedWriteableRegistry;
     
    -    public RestMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder) {
    +    public RestMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) {
             this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
             this.searchUsageHolder = searchUsageHolder;
    +        this.namedWriteableRegistry = namedWriteableRegistry;
         }
     
         @Override
    @@ -74,12 +76,7 @@ public String getName() {
     
         @Override
         public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
    -        final MultiSearchRequest multiSearchRequest = parseRequest(
    -            request,
    -            client.getNamedWriteableRegistry(),
    -            allowExplicitIndex,
    -            searchUsageHolder
    -        );
    +        final MultiSearchRequest multiSearchRequest = parseRequest(request, namedWriteableRegistry, allowExplicitIndex, searchUsageHolder);
             return channel -> {
                 final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel());
                 cancellableClient.execute(
    diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
    index 711aec182525e..067cf2d800957 100644
    --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
    +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
    @@ -69,9 +69,11 @@ public class RestSearchAction extends BaseRestHandler {
         public static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM, INCLUDE_NAMED_QUERIES_SCORE_PARAM);
     
         private final SearchUsageHolder searchUsageHolder;
    +    private final NamedWriteableRegistry namedWriteableRegistry;
     
    -    public RestSearchAction(SearchUsageHolder searchUsageHolder) {
    +    public RestSearchAction(SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) {
             this.searchUsageHolder = searchUsageHolder;
    +        this.namedWriteableRegistry = namedWriteableRegistry;
         }
     
         @Override
    @@ -114,7 +116,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
              */
             IntConsumer setSize = size -> searchRequest.source().size(size);
             request.withContentOrSourceParamParserOrNull(
    -            parser -> parseSearchRequest(searchRequest, request, parser, client.getNamedWriteableRegistry(), setSize, searchUsageHolder)
    +            parser -> parseSearchRequest(searchRequest, request, parser, namedWriteableRegistry, setSize, searchUsageHolder)
             );
     
             return channel -> {
    diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java
    index d2fc20ab83269..dde044bf15115 100644
    --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java
    +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java
    @@ -13,6 +13,7 @@
     import org.elasticsearch.TransportVersions;
     import org.elasticsearch.common.ParsingException;
     import org.elasticsearch.common.Strings;
    +import org.elasticsearch.common.bytes.BytesArray;
     import org.elasticsearch.common.bytes.BytesReference;
     import org.elasticsearch.common.compress.CompressorFactory;
     import org.elasticsearch.common.document.DocumentField;
    @@ -24,7 +25,9 @@
     import org.elasticsearch.common.xcontent.ChunkedToXContent;
     import org.elasticsearch.common.xcontent.XContentHelper;
     import org.elasticsearch.common.xcontent.support.XContentMapValues;
    +import org.elasticsearch.core.AbstractRefCounted;
     import org.elasticsearch.core.Nullable;
    +import org.elasticsearch.core.RefCounted;
     import org.elasticsearch.core.RestApiVersion;
     import org.elasticsearch.index.mapper.IgnoredFieldMapper;
     import org.elasticsearch.index.mapper.MapperService;
    @@ -35,6 +38,7 @@
     import org.elasticsearch.search.fetch.subphase.LookupField;
     import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
     import org.elasticsearch.search.lookup.Source;
    +import org.elasticsearch.transport.LeakTracker;
     import org.elasticsearch.transport.RemoteClusterAware;
     import org.elasticsearch.xcontent.ConstructingObjectParser;
     import org.elasticsearch.xcontent.ObjectParser;
    @@ -55,6 +59,7 @@
     import java.util.List;
     import java.util.Map;
     import java.util.Objects;
    +import java.util.stream.Collectors;
     
     import static java.util.Collections.emptyMap;
     import static java.util.Collections.unmodifiableMap;
    @@ -70,7 +75,7 @@
      *
      * @see SearchHits
      */
    -public final class SearchHit implements Writeable, ToXContentObject {
    +public final class SearchHit implements Writeable, ToXContentObject, RefCounted {
     
         private final transient int docId;
     
    @@ -114,6 +119,8 @@ public final class SearchHit implements Writeable, ToXContentObject {
     
         private Map innerHits;
     
    +    private final RefCounted refCounted;
    +
         // used only in tests
         public SearchHit(int docId) {
             this(docId, null);
    @@ -124,6 +131,10 @@ public SearchHit(int docId, String id) {
         }
     
         public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity) {
    +        this(nestedTopDocId, id, nestedIdentity, null);
    +    }
    +
    +    private SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity, @Nullable RefCounted refCounted) {
             this(
                 nestedTopDocId,
                 DEFAULT_SCORE,
    @@ -142,8 +153,10 @@ public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity) {
                 null,
                 null,
                 null,
    +            null,
                 new HashMap<>(),
    -            new HashMap<>()
    +            new HashMap<>(),
    +            refCounted
             );
         }
     
    @@ -164,9 +177,11 @@ public SearchHit(
             SearchShardTarget shard,
             String index,
             String clusterAlias,
    +        Map sourceAsMap,
             Map innerHits,
             Map documentFields,
    -        Map metaFields
    +        Map metaFields,
    +        @Nullable RefCounted refCounted
         ) {
             this.docId = docId;
             this.score = score;
    @@ -184,12 +199,28 @@ public SearchHit(
             this.shard = shard;
             this.index = index;
             this.clusterAlias = clusterAlias;
    +        this.sourceAsMap = sourceAsMap;
             this.innerHits = innerHits;
             this.documentFields = documentFields;
             this.metaFields = metaFields;
    +        this.refCounted = refCounted == null ? LeakTracker.wrap(new AbstractRefCounted() {
    +            @Override
    +            protected void closeInternal() {
    +                if (SearchHit.this.innerHits != null) {
    +                    for (SearchHits h : SearchHit.this.innerHits.values()) {
    +                        h.decRef();
    +                    }
    +                    SearchHit.this.innerHits = null;
    +                }
    +                if (SearchHit.this.source instanceof RefCounted r) {
    +                    r.decRef();
    +                }
    +                SearchHit.this.source = null;
    +            }
    +        }) : ALWAYS_REFERENCED;
         }
     
    -    public static SearchHit readFrom(StreamInput in) throws IOException {
    +    public static SearchHit readFrom(StreamInput in, boolean pooled) throws IOException {
             final float score = in.readFloat();
             final int rank;
             if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
    @@ -205,7 +236,7 @@ public static SearchHit readFrom(StreamInput in) throws IOException {
             final long version = in.readLong();
             final long seqNo = in.readZLong();
             final long primaryTerm = in.readVLong();
    -        BytesReference source = in.readBytesReference();
    +        BytesReference source = pooled ? in.readReleasableBytesReference() : in.readBytesReference();
             if (source.length() == 0) {
                 source = null;
             }
    @@ -244,7 +275,7 @@ public static SearchHit readFrom(StreamInput in) throws IOException {
             if (size > 0) {
                 innerHits = Maps.newMapWithExpectedSize(size);
                 for (int i = 0; i < size; i++) {
    -                innerHits.put(in.readString(), new SearchHits(in));
    +                innerHits.put(in.readString(), SearchHits.readFrom(in, pooled));
                 }
             } else {
                 innerHits = null;
    @@ -266,16 +297,31 @@ public static SearchHit readFrom(StreamInput in) throws IOException {
                 shardTarget,
                 index,
                 clusterAlias,
    +            null,
                 innerHits,
                 documentFields,
    -            metaFields
    +            metaFields,
    +            pooled ? null : ALWAYS_REFERENCED
             );
         }
     
    +    public static SearchHit unpooled(int docId) {
    +        return unpooled(docId, null);
    +    }
    +
    +    public static SearchHit unpooled(int docId, String id) {
    +        return unpooled(docId, id, null);
    +    }
    +
    +    public static SearchHit unpooled(int nestedTopDocId, String id, NestedIdentity nestedIdentity) {
    +        return new SearchHit(nestedTopDocId, id, nestedIdentity, ALWAYS_REFERENCED);
    +    }
    +
         private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME);
     
         @Override
         public void writeTo(StreamOutput out) throws IOException {
    +        assert hasReferences();
             out.writeFloat(score);
             if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
                 out.writeVInt(rank);
    @@ -401,6 +447,7 @@ public NestedIdentity getNestedIdentity() {
          * Returns bytes reference, also uncompress the source if needed.
          */
         public BytesReference getSourceRef() {
    +        assert hasReferences();
             if (this.source == null) {
                 return null;
             }
    @@ -427,6 +474,7 @@ public SearchHit sourceRef(BytesReference source) {
          * {@code _source} or if source is disabled in the mapping.
          */
         public boolean hasSource() {
    +        assert hasReferences();
             return source != null;
         }
     
    @@ -434,6 +482,7 @@ public boolean hasSource() {
          * The source of the document as string (can be {@code null}).
          */
         public String getSourceAsString() {
    +        assert hasReferences();
             if (source == null) {
                 return null;
             }
    @@ -448,6 +497,7 @@ public String getSourceAsString() {
          * The source of the document as a map (can be {@code null}).
          */
         public Map getSourceAsMap() {
    +        assert hasReferences();
             if (source == null) {
                 return null;
             }
    @@ -463,6 +513,7 @@ public Map getSourceAsMap() {
          * The hit field matching the given field name.
          */
         public DocumentField field(String fieldName) {
    +        assert hasReferences();
             DocumentField result = documentFields.get(fieldName);
             if (result != null) {
                 return result;
    @@ -653,13 +704,72 @@ public Map getMatchedQueriesAndScores() {
          * @return Inner hits or null if there are none
          */
         public Map getInnerHits() {
    +        assert hasReferences();
             return innerHits;
         }
     
         public void setInnerHits(Map innerHits) {
    +        assert innerHits == null || innerHits.values().stream().noneMatch(h -> h.hasReferences() == false);
    +        assert this.innerHits == null;
             this.innerHits = innerHits;
         }
     
    +    @Override
    +    public void incRef() {
    +        refCounted.incRef();
    +    }
    +
    +    @Override
    +    public boolean tryIncRef() {
    +        return refCounted.tryIncRef();
    +    }
    +
    +    @Override
    +    public boolean decRef() {
    +        return refCounted.decRef();
    +    }
    +
    +    @Override
    +    public boolean hasReferences() {
    +        return refCounted.hasReferences();
    +    }
    +
    +    public SearchHit asUnpooled() {
    +        assert hasReferences();
    +        if (isPooled() == false) {
    +            return this;
    +        }
    +        return new SearchHit(
    +            docId,
    +            score,
    +            rank,
    +            id,
    +            nestedIdentity,
    +            version,
    +            seqNo,
    +            primaryTerm,
    +            source instanceof RefCounted ? new BytesArray(source.toBytesRef(), true) : source,
    +            highlightFields,
    +            sortValues,
    +            matchedQueries,
    +            explanation,
    +            shard,
    +            index,
    +            clusterAlias,
    +            sourceAsMap,
    +            innerHits == null
    +                ? null
    +                : innerHits.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().asUnpooled())),
    +            documentFields,
    +            metaFields,
    +            ALWAYS_REFERENCED
    +        );
    +    }
    +
    +    public boolean isPooled() {
    +        return refCounted != ALWAYS_REFERENCED;
    +    }
    +
         public static class Fields {
             static final String _INDEX = "_index";
             static final String _ID = "_id";
    @@ -690,6 +800,7 @@ public static class Fields {
     
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    +        assert hasReferences();
             builder.startObject();
             toInnerXContent(builder, params);
             builder.endObject();
    @@ -972,9 +1083,11 @@ public static SearchHit createFromMap(Map values) {
                 shardTarget,
                 index,
                 clusterAlias,
    +            null,
                 get(Fields.INNER_HITS, values, null),
                 get(DOCUMENT_FIELDS, values, Collections.emptyMap()),
    -            get(METADATA_FIELDS, values, Collections.emptyMap())
    +            get(METADATA_FIELDS, values, Collections.emptyMap()),
    +            ALWAYS_REFERENCED // TODO: do we ever want pooling here?
             );
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java
    index c689f928954d2..a5c9425ba754c 100644
    --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java
    +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java
    @@ -18,8 +18,11 @@
     import org.elasticsearch.common.lucene.Lucene;
     import org.elasticsearch.common.xcontent.ChunkedToXContent;
     import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
    +import org.elasticsearch.core.AbstractRefCounted;
     import org.elasticsearch.core.Nullable;
    +import org.elasticsearch.core.RefCounted;
     import org.elasticsearch.rest.action.search.RestSearchAction;
    +import org.elasticsearch.transport.LeakTracker;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentParser;
     
    @@ -32,7 +35,7 @@
     
     import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
     
    -public final class SearchHits implements Writeable, ChunkedToXContent, Iterable {
    +public final class SearchHits implements Writeable, ChunkedToXContent, RefCounted, Iterable {
     
         public static final SearchHit[] EMPTY = new SearchHit[0];
         public static final SearchHits EMPTY_WITH_TOTAL_HITS = SearchHits.empty(new TotalHits(0, Relation.EQUAL_TO), 0);
    @@ -48,6 +51,8 @@ public final class SearchHits implements Writeable, ChunkedToXContent, Iterable<
         @Nullable
         private final Object[] collapseValues;
     
    +    private final RefCounted refCounted;
    +
         public static SearchHits empty(@Nullable TotalHits totalHits, float maxScore) {
             return new SearchHits(EMPTY, totalHits, maxScore);
         }
    @@ -63,6 +68,35 @@ public SearchHits(
             @Nullable SortField[] sortFields,
             @Nullable String collapseField,
             @Nullable Object[] collapseValues
    +    ) {
    +        this(
    +            hits,
    +            totalHits,
    +            maxScore,
    +            sortFields,
    +            collapseField,
    +            collapseValues,
    +            hits.length == 0 ? ALWAYS_REFERENCED : LeakTracker.wrap(new AbstractRefCounted() {
    +                @Override
    +                protected void closeInternal() {
    +                    for (int i = 0; i < hits.length; i++) {
    +                        assert hits[i] != null;
    +                        hits[i].decRef();
    +                        hits[i] = null;
    +                    }
    +                }
    +            })
    +        );
    +    }
    +
    +    private SearchHits(
    +        SearchHit[] hits,
    +        @Nullable TotalHits totalHits,
    +        float maxScore,
    +        @Nullable SortField[] sortFields,
    +        @Nullable String collapseField,
    +        @Nullable Object[] collapseValues,
    +        RefCounted refCounted
         ) {
             this.hits = hits;
             this.totalHits = totalHits;
    @@ -70,32 +104,64 @@ public SearchHits(
             this.sortFields = sortFields;
             this.collapseField = collapseField;
             this.collapseValues = collapseValues;
    +        this.refCounted = refCounted;
         }
     
    -    public SearchHits(StreamInput in) throws IOException {
    +    public static SearchHits unpooled(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore) {
    +        return unpooled(hits, totalHits, maxScore, null, null, null);
    +    }
    +
    +    public static SearchHits unpooled(
    +        SearchHit[] hits,
    +        @Nullable TotalHits totalHits,
    +        float maxScore,
    +        @Nullable SortField[] sortFields,
    +        @Nullable String collapseField,
    +        @Nullable Object[] collapseValues
    +    ) {
    +        assert assertUnpooled(hits);
    +        return new SearchHits(hits, totalHits, maxScore, sortFields, collapseField, collapseValues, ALWAYS_REFERENCED);
    +    }
    +
    +    private static boolean assertUnpooled(SearchHit[] searchHits) {
    +        for (SearchHit searchHit : searchHits) {
    +            assert searchHit.isPooled() == false : "hit was pooled [" + searchHit + "]";
    +        }
    +        return true;
    +    }
    +
    +    public static SearchHits readFrom(StreamInput in, boolean pooled) throws IOException {
    +        final TotalHits totalHits;
             if (in.readBoolean()) {
                 totalHits = Lucene.readTotalHits(in);
             } else {
                 // track_total_hits is false
                 totalHits = null;
             }
    -        maxScore = in.readFloat();
    +        final float maxScore = in.readFloat();
             int size = in.readVInt();
    +        final SearchHit[] hits;
             if (size == 0) {
                 hits = EMPTY;
             } else {
                 hits = new SearchHit[size];
                 for (int i = 0; i < hits.length; i++) {
    -                hits[i] = SearchHit.readFrom(in);
    +                hits[i] = SearchHit.readFrom(in, pooled);
                 }
             }
    -        sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new);
    -        collapseField = in.readOptionalString();
    -        collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new);
    +        var sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new);
    +        var collapseField = in.readOptionalString();
    +        var collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new);
    +        if (pooled) {
    +            return new SearchHits(hits, totalHits, maxScore, sortFields, collapseField, collapseValues);
    +        } else {
    +            return unpooled(hits, totalHits, maxScore, sortFields, collapseField, collapseValues);
    +        }
         }
     
         @Override
         public void writeTo(StreamOutput out) throws IOException {
    +        assert hasReferences();
             final boolean hasTotalHits = totalHits != null;
             out.writeBoolean(hasTotalHits);
             if (hasTotalHits) {
    @@ -128,6 +194,7 @@ public float getMaxScore() {
          * The hits of the search request (based on the search type, and from / size provided).
          */
         public SearchHit[] getHits() {
    +        assert hasReferences();
             return this.hits;
         }
     
    @@ -135,6 +202,7 @@ public SearchHit[] getHits() {
          * Return the hit as the provided position.
          */
         public SearchHit getAt(int position) {
    +        assert hasReferences();
             return hits[position];
         }
     
    @@ -165,9 +233,42 @@ public Object[] getCollapseValues() {
     
         @Override
         public Iterator iterator() {
    +        assert hasReferences();
             return Iterators.forArray(getHits());
         }
     
    +    @Override
    +    public void incRef() {
    +        refCounted.incRef();
    +    }
    +
    +    @Override
    +    public boolean tryIncRef() {
    +        return refCounted.tryIncRef();
    +    }
    +
    +    @Override
    +    public boolean decRef() {
    +        return refCounted.decRef();
    +    }
    +
    +    @Override
    +    public boolean hasReferences() {
    +        return refCounted.hasReferences();
    +    }
    +
    +    public SearchHits asUnpooled() {
    +        assert hasReferences();
    +        if (refCounted == ALWAYS_REFERENCED) {
    +            return this;
    +        }
    +        final SearchHit[] unpooledHits = new SearchHit[hits.length];
    +        for (int i = 0; i < hits.length; i++) {
    +            unpooledHits[i] = hits[i].asUnpooled();
    +        }
    +        return unpooled(unpooledHits, totalHits, maxScore, sortFields, collapseField, collapseValues);
    +    }
    +
         public static final class Fields {
             public static final String HITS = "hits";
             public static final String TOTAL = "total";
    @@ -176,6 +277,7 @@ public static final class Fields {
     
         @Override
         public Iterator toXContentChunked(ToXContent.Params params) {
    +        assert hasReferences();
             return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> {
                 boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false);
                 if (totalHitAsInt) {
    @@ -239,7 +341,7 @@ public static SearchHits fromXContent(XContentParser parser) throws IOException
                     }
                 }
             }
    -        return new SearchHits(hits.toArray(SearchHits.EMPTY), totalHits, maxScore);
    +        return SearchHits.unpooled(hits.toArray(SearchHits.EMPTY), totalHits, maxScore);
         }
     
         @Override
    diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java
    index 8ba48563c8f55..5b17203ded132 100644
    --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java
    +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java
    @@ -245,6 +245,7 @@
     import org.elasticsearch.search.suggest.phrase.StupidBackoff;
     import org.elasticsearch.search.suggest.term.TermSuggestion;
     import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
    +import org.elasticsearch.search.vectors.ExactKnnQueryBuilder;
     import org.elasticsearch.search.vectors.KnnScoreDocQueryBuilder;
     import org.elasticsearch.search.vectors.KnnVectorQueryBuilder;
     import org.elasticsearch.search.vectors.QueryVectorBuilder;
    @@ -1130,6 +1131,9 @@ private void registerQueryParsers(List plugins) {
             registerQuery(new QuerySpec<>(KnnScoreDocQueryBuilder.NAME, KnnScoreDocQueryBuilder::new, parser -> {
                 throw new IllegalArgumentException("[score_doc] queries cannot be provided directly");
             }));
    +        registerQuery(new QuerySpec<>(ExactKnnQueryBuilder.NAME, ExactKnnQueryBuilder::new, parser -> {
    +            throw new IllegalArgumentException("[exact_knn] queries cannot be provided directly");
    +        }));
     
             registerFromPlugin(plugins, SearchPlugin::getQueries, this::registerQuery);
     
    diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java
    index 8a03c7e9f08ba..d5b2565187a3f 100644
    --- a/server/src/main/java/org/elasticsearch/search/SearchService.java
    +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java
    @@ -53,6 +53,7 @@
     import org.elasticsearch.index.engine.Engine;
     import org.elasticsearch.index.query.CoordinatorRewriteContextProvider;
     import org.elasticsearch.index.query.InnerHitContextBuilder;
    +import org.elasticsearch.index.query.InnerHitsRewriteContext;
     import org.elasticsearch.index.query.MatchAllQueryBuilder;
     import org.elasticsearch.index.query.MatchNoneQueryBuilder;
     import org.elasticsearch.index.query.QueryBuilder;
    @@ -1234,13 +1235,19 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc
             context.size(source.size());
             Map innerHitBuilders = new HashMap<>();
             QueryBuilder query = source.query();
    +        InnerHitsRewriteContext innerHitsRewriteContext = new InnerHitsRewriteContext(
    +            context.getSearchExecutionContext().getParserConfig(),
    +            context::getRelativeTimeInMillis
    +        );
             if (query != null) {
    -            InnerHitContextBuilder.extractInnerHits(query, innerHitBuilders);
    +            QueryBuilder rewrittenForInnerHits = Rewriteable.rewrite(query, innerHitsRewriteContext, true);
    +            InnerHitContextBuilder.extractInnerHits(rewrittenForInnerHits, innerHitBuilders);
                 searchExecutionContext.setAliasFilter(context.request().getAliasFilter().getQueryBuilder());
                 context.parsedQuery(searchExecutionContext.toQuery(query));
             }
             if (source.postFilter() != null) {
    -            InnerHitContextBuilder.extractInnerHits(source.postFilter(), innerHitBuilders);
    +            QueryBuilder rewrittenForInnerHits = Rewriteable.rewrite(source.postFilter(), innerHitsRewriteContext, true);
    +            InnerHitContextBuilder.extractInnerHits(rewrittenForInnerHits, innerHitBuilders);
                 context.parsedPostFilter(searchExecutionContext.toQuery(source.postFilter()));
             }
             if (innerHitBuilders.size() > 0) {
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java
    deleted file mode 100644
    index 3e15488cc430b..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java
    +++ /dev/null
    @@ -1,145 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations;
    -
    -import org.apache.lucene.util.SetOnce;
    -import org.elasticsearch.common.ParsingException;
    -import org.elasticsearch.common.collect.Iterators;
    -import org.elasticsearch.common.util.Maps;
    -import org.elasticsearch.xcontent.ToXContentFragment;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.Collections;
    -import java.util.Iterator;
    -import java.util.List;
    -import java.util.Locale;
    -import java.util.Map;
    -import java.util.Objects;
    -
    -import static java.util.Collections.emptyMap;
    -import static java.util.Collections.unmodifiableMap;
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject;
    -
    -/**
    - * Represents a set of {@link Aggregation}s
    - */
    -public class Aggregations implements Iterable, ToXContentFragment {
    -
    -    public static final String AGGREGATIONS_FIELD = "aggregations";
    -
    -    protected final List aggregations;
    -    private Map aggregationsAsMap;
    -
    -    public Aggregations(List aggregations) {
    -        this.aggregations = aggregations;
    -        if (aggregations.isEmpty()) {
    -            aggregationsAsMap = emptyMap();
    -        }
    -    }
    -
    -    /**
    -     * Iterates over the {@link Aggregation}s.
    -     */
    -    @Override
    -    public final Iterator iterator() {
    -        return Iterators.map(aggregations.iterator(), p -> (Aggregation) p);
    -    }
    -
    -    /**
    -     * The list of {@link Aggregation}s.
    -     */
    -    public final List asList() {
    -        return Collections.unmodifiableList(aggregations);
    -    }
    -
    -    /**
    -     * Returns the {@link Aggregation}s keyed by aggregation name.
    -     */
    -    public final Map asMap() {
    -        return getAsMap();
    -    }
    -
    -    /**
    -     * Returns the {@link Aggregation}s keyed by aggregation name.
    -     */
    -    public final Map getAsMap() {
    -        if (aggregationsAsMap == null) {
    -            Map newAggregationsAsMap = Maps.newMapWithExpectedSize(aggregations.size());
    -            for (Aggregation aggregation : aggregations) {
    -                newAggregationsAsMap.put(aggregation.getName(), aggregation);
    -            }
    -            this.aggregationsAsMap = unmodifiableMap(newAggregationsAsMap);
    -        }
    -        return aggregationsAsMap;
    -    }
    -
    -    /**
    -     * Returns the aggregation that is associated with the specified name.
    -     */
    -    @SuppressWarnings("unchecked")
    -    public final  A get(String name) {
    -        return (A) asMap().get(name);
    -    }
    -
    -    @Override
    -    public final boolean equals(Object obj) {
    -        if (obj == null || getClass() != obj.getClass()) {
    -            return false;
    -        }
    -        return aggregations.equals(((Aggregations) obj).aggregations);
    -    }
    -
    -    @Override
    -    public final int hashCode() {
    -        return Objects.hash(getClass(), aggregations);
    -    }
    -
    -    @Override
    -    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -        if (aggregations.isEmpty()) {
    -            return builder;
    -        }
    -        builder.startObject(AGGREGATIONS_FIELD);
    -        toXContentInternal(builder, params);
    -        return builder.endObject();
    -    }
    -
    -    /**
    -     * Directly write all the aggregations without their bounding object. Used by sub-aggregations (non top level aggs)
    -     */
    -    public XContentBuilder toXContentInternal(XContentBuilder builder, Params params) throws IOException {
    -        for (Aggregation aggregation : aggregations) {
    -            aggregation.toXContent(builder, params);
    -        }
    -        return builder;
    -    }
    -
    -    public static Aggregations fromXContent(XContentParser parser) throws IOException {
    -        final List aggregations = new ArrayList<>();
    -        XContentParser.Token token;
    -        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -            if (token == XContentParser.Token.START_OBJECT) {
    -                SetOnce typedAgg = new SetOnce<>();
    -                String currentField = parser.currentName();
    -                parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, typedAgg::set);
    -                if (typedAgg.get() != null) {
    -                    aggregations.add(typedAgg.get());
    -                } else {
    -                    throw new ParsingException(
    -                        parser.getTokenLocation(),
    -                        String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField)
    -                    );
    -                }
    -            }
    -        }
    -        return new Aggregations(aggregations);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/HasAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/HasAggregations.java
    index 4082bdce358a0..048ca4e5a29ef 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/HasAggregations.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/HasAggregations.java
    @@ -10,6 +10,6 @@
     
     public interface HasAggregations {
     
    -    Aggregations getAggregations();
    +    InternalAggregations getAggregations();
     
     }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java
    index c1c54f80987f0..0c299bce7c29d 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java
    @@ -7,32 +7,45 @@
      */
     package org.elasticsearch.search.aggregations;
     
    +import org.apache.lucene.util.SetOnce;
    +import org.elasticsearch.common.ParsingException;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.common.io.stream.Writeable;
    +import org.elasticsearch.common.util.Maps;
     import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
     import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator;
     import org.elasticsearch.search.aggregations.support.AggregationPath;
     import org.elasticsearch.search.aggregations.support.SamplingContext;
     import org.elasticsearch.search.sort.SortValue;
    +import org.elasticsearch.xcontent.ToXContentFragment;
    +import org.elasticsearch.xcontent.XContentBuilder;
    +import org.elasticsearch.xcontent.XContentParser;
     
     import java.io.IOException;
     import java.util.ArrayList;
    -import java.util.Collections;
     import java.util.Comparator;
     import java.util.HashMap;
     import java.util.Iterator;
     import java.util.List;
    +import java.util.Locale;
     import java.util.Map;
    +import java.util.Objects;
     import java.util.Optional;
     import java.util.stream.Collectors;
     
    +import static java.util.Collections.unmodifiableList;
    +import static java.util.Collections.unmodifiableMap;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject;
    +
     /**
    - * An internal implementation of {@link Aggregations}.
    + * Represents a set of {@link InternalAggregation}s
      */
    -public final class InternalAggregations extends Aggregations implements Writeable {
    +public final class InternalAggregations implements Iterable<InternalAggregation>, ToXContentFragment, Writeable {
    +
    +    public static final String AGGREGATIONS_FIELD = "aggregations";
     
    -    public static final InternalAggregations EMPTY = new InternalAggregations(Collections.emptyList());
    +    public static final InternalAggregations EMPTY = new InternalAggregations(List.of());
     
         private static final Comparator INTERNAL_AGG_COMPARATOR = (agg1, agg2) -> {
             if (agg1.canLeadReduction() == agg2.canLeadReduction()) {
    @@ -44,11 +57,115 @@ public final class InternalAggregations extends Aggregations implements Writeabl
             }
         };
     
    +    private final List<InternalAggregation> aggregations;
    +    private Map<String, InternalAggregation> aggregationsAsMap;
    +
         /**
          * Constructs a new aggregation.
          */
         private InternalAggregations(List<InternalAggregation> aggregations) {
    -        super(aggregations);
    +        this.aggregations = aggregations;
    +        if (aggregations.isEmpty()) {
    +            aggregationsAsMap = Map.of();
    +        }
    +    }
    +
    +    /**
    +     * Iterates over the {@link InternalAggregation}s.
    +     */
    +    @Override
    +    public Iterator<InternalAggregation> iterator() {
    +        return aggregations.iterator();
    +    }
    +
    +    /**
    +     * The list of {@link InternalAggregation}s.
    +     */
    +    public List<InternalAggregation> asList() {
    +        return unmodifiableList(aggregations);
    +    }
    +
    +    /**
    +     * Returns the {@link InternalAggregation}s keyed by aggregation name.
    +     */
    +    public Map<String, InternalAggregation> asMap() {
    +        return getAsMap();
    +    }
    +
    +    /**
    +     * Returns the {@link InternalAggregation}s keyed by aggregation name.
    +     */
    +    public Map<String, InternalAggregation> getAsMap() {
    +        if (aggregationsAsMap == null) {
    +            Map<String, InternalAggregation> newAggregationsAsMap = Maps.newMapWithExpectedSize(aggregations.size());
    +            for (InternalAggregation aggregation : aggregations) {
    +                newAggregationsAsMap.put(aggregation.getName(), aggregation);
    +            }
    +            this.aggregationsAsMap = unmodifiableMap(newAggregationsAsMap);
    +        }
    +        return aggregationsAsMap;
    +    }
    +
    +    /**
    +     * Returns the aggregation that is associated with the specified name.
    +     */
    +    @SuppressWarnings("unchecked")
    +    public <A extends InternalAggregation> A get(String name) {
    +        return (A) asMap().get(name);
    +    }
    +
    +    @Override
    +    public boolean equals(Object obj) {
    +        if (obj == null || getClass() != obj.getClass()) {
    +            return false;
    +        }
    +        return aggregations.equals(((InternalAggregations) obj).aggregations);
    +    }
    +
    +    @Override
    +    public int hashCode() {
    +        return Objects.hash(getClass(), aggregations);
    +    }
    +
    +    @Override
    +    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    +        if (aggregations.isEmpty()) {
    +            return builder;
    +        }
    +        builder.startObject(AGGREGATIONS_FIELD);
    +        toXContentInternal(builder, params);
    +        return builder.endObject();
    +    }
    +
    +    /**
    +     * Directly write all the aggregations without their bounding object. Used by sub-aggregations (non top level aggs)
    +     */
    +    public XContentBuilder toXContentInternal(XContentBuilder builder, Params params) throws IOException {
    +        for (InternalAggregation aggregation : aggregations) {
    +            aggregation.toXContent(builder, params);
    +        }
    +        return builder;
    +    }
    +
    +    public static InternalAggregations fromXContent(XContentParser parser) throws IOException {
    +        final List<InternalAggregation> aggregations = new ArrayList<>();
    +        XContentParser.Token token;
    +        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    +            if (token == XContentParser.Token.START_OBJECT) {
    +                SetOnce<InternalAggregation> typedAgg = new SetOnce<>();
    +                String currentField = parser.currentName();
    +                parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set);
    +                if (typedAgg.get() != null) {
    +                    aggregations.add(typedAgg.get());
    +                } else {
    +                    throw new ParsingException(
    +                        parser.getTokenLocation(),
    +                        String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField)
    +                    );
    +                }
    +            }
    +        }
    +        return new InternalAggregations(aggregations);
         }
     
         public static InternalAggregations from(List aggregations) {
    @@ -74,9 +191,8 @@ public List copyResults() {
             return new ArrayList<>(getInternalAggregations());
         }
     
    -    @SuppressWarnings("unchecked")
         private List getInternalAggregations() {
    -        return (List) aggregations;
    +        return aggregations;
         }
     
         /**
    @@ -138,12 +254,12 @@ public static InternalAggregations reduce(List aggregation
             // first we collect all aggregations of the same type and list them together
             Map> aggByName = new HashMap<>();
             for (InternalAggregations aggregations : aggregationsList) {
    -            for (Aggregation aggregation : aggregations.aggregations) {
    +            for (InternalAggregation aggregation : aggregations.aggregations) {
                     List aggs = aggByName.computeIfAbsent(
                         aggregation.getName(),
                         k -> new ArrayList<>(aggregationsList.size())
                     );
    -                aggs.add((InternalAggregation) aggregation);
    +                aggs.add(aggregation);
                 }
             }
     
    @@ -173,9 +289,7 @@ public static InternalAggregations reduce(List aggregation
          */
         public static InternalAggregations finalizeSampling(InternalAggregations internalAggregations, SamplingContext samplingContext) {
             return from(
    -            internalAggregations.aggregations.stream()
    -                .map(agg -> ((InternalAggregation) agg).finalizeSampling(samplingContext))
    -                .collect(Collectors.toList())
    +            internalAggregations.aggregations.stream().map(agg -> agg.finalizeSampling(samplingContext)).collect(Collectors.toList())
             );
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java
    index ff1ca58d351e3..dda632e7aa020 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java
    @@ -168,7 +168,7 @@ public InternalAggregation copyWithRewritenBuckets(Function newBuckets = new ArrayList<>();
             for (B bucket : getBuckets()) {
    -            InternalAggregations rewritten = rewriter.apply((InternalAggregations) bucket.getAggregations());
    +            InternalAggregations rewritten = rewriter.apply(bucket.getAggregations());
                 if (rewritten == null) {
                     newBuckets.add(bucket);
                     continue;
    @@ -188,7 +188,7 @@ protected boolean mustReduceOnSingleInternalAgg() {
         @Override
         public void forEachBucket(Consumer consumer) {
             for (B bucket : getBuckets()) {
    -            consumer.accept((InternalAggregations) bucket.getAggregations());
    +            consumer.accept(bucket.getAggregations());
             }
         }
     
    @@ -211,7 +211,7 @@ public Object getProperty(String containingAggName, List path) {
                 if (path.isEmpty()) {
                     return this;
                 }
    -            Aggregations aggregations = getAggregations();
    +            InternalAggregations aggregations = getAggregations();
                 String aggName = path.get(0);
                 if (aggName.equals("_count")) {
                     if (path.size() > 1) {
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java
    index 15d4a03be81f2..07ddf5fd4555b 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java
    @@ -72,8 +72,8 @@ public  Comparator partiallyBuiltBucketComparator(ToLongFun
             @Override
             public Comparator comparator() {
                 return (lhs, rhs) -> {
    -                final SortValue l = path.resolveValue(((InternalAggregations) lhs.getAggregations()));
    -                final SortValue r = path.resolveValue(((InternalAggregations) rhs.getAggregations()));
    +                final SortValue l = path.resolveValue(lhs.getAggregations());
    +                final SortValue r = path.resolveValue(rhs.getAggregations());
                     int compareResult = l.compareTo(r);
                     return order == SortOrder.ASC ? compareResult : -compareResult;
                 };
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java
    deleted file mode 100644
    index ac1d8b970cb90..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java
    +++ /dev/null
    @@ -1,80 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations;
    -
    -import org.elasticsearch.xcontent.AbstractObjectParser;
    -import org.elasticsearch.xcontent.ToXContent;
    -import org.elasticsearch.xcontent.ToXContentFragment;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -import org.elasticsearch.xcontent.XContentParser.Token;
    -
    -import java.io.IOException;
    -import java.util.Collections;
    -import java.util.Map;
    -
    -/**
    - * An implementation of {@link Aggregation} that is parsed from a REST response.
    - * Serves as a base class for all aggregation implementations that are parsed from REST.
    - */
    -public abstract class ParsedAggregation implements Aggregation, ToXContentFragment {
    -
    -    protected static void declareAggregationFields(AbstractObjectParser objectParser) {
    -        objectParser.declareObject(
    -            (parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata),
    -            (parser, context) -> parser.map(),
    -            InternalAggregation.CommonFields.META
    -        );
    -    }
    -
    -    private String name;
    -    protected Map metadata;
    -
    -    @Override
    -    public final String getName() {
    -        return name;
    -    }
    -
    -    protected void setName(String name) {
    -        this.name = name;
    -    }
    -
    -    @Override
    -    public final Map getMetadata() {
    -        return metadata;
    -    }
    -
    -    @Override
    -    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
    -        // Concatenates the type and the name of the aggregation (ex: top_hits#foo)
    -        builder.startObject(String.join(InternalAggregation.TYPED_KEYS_DELIMITER, getType(), name));
    -        if (this.metadata != null) {
    -            builder.field(InternalAggregation.CommonFields.META.getPreferredName());
    -            builder.map(this.metadata);
    -        }
    -        doXContentBody(builder, params);
    -        builder.endObject();
    -        return builder;
    -    }
    -
    -    protected abstract XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException;
    -
    -    /**
    -     * Parse a token of type XContentParser.Token.VALUE_NUMBER or XContentParser.Token.STRING to a double.
    -     * In other cases the default value is returned instead.
    -     */
    -    protected static double parseDouble(XContentParser parser, double defaultNullValue) throws IOException {
    -        Token currentToken = parser.currentToken();
    -        if (currentToken == XContentParser.Token.VALUE_NUMBER || currentToken == XContentParser.Token.VALUE_STRING) {
    -            return parser.doubleValue();
    -        } else {
    -            return defaultNullValue;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedMultiBucketAggregation.java
    deleted file mode 100644
    index dce1ac0df53ee..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedMultiBucketAggregation.java
    +++ /dev/null
    @@ -1,183 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations;
    -
    -import org.elasticsearch.common.CheckedBiConsumer;
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.core.CheckedFunction;
    -import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.List;
    -import java.util.function.Supplier;
    -
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    -
    -public abstract class ParsedMultiBucketAggregation extends ParsedAggregation
    -    implements
    -        MultiBucketsAggregation {
    -
    -    protected final List buckets = new ArrayList<>();
    -    protected boolean keyed = false;
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        if (keyed) {
    -            builder.startObject(CommonFields.BUCKETS.getPreferredName());
    -        } else {
    -            builder.startArray(CommonFields.BUCKETS.getPreferredName());
    -        }
    -        for (B bucket : buckets) {
    -            bucket.toXContent(builder, params);
    -        }
    -        if (keyed) {
    -            builder.endObject();
    -        } else {
    -            builder.endArray();
    -        }
    -        return builder;
    -    }
    -
    -    public static , T extends ParsedBucket> void declareMultiBucketAggregationFields(
    -        final ObjectParser objectParser,
    -        final CheckedFunction bucketParser,
    -        final CheckedFunction keyedBucketParser
    -    ) {
    -        declareAggregationFields(objectParser);
    -        objectParser.declareField((parser, aggregation, context) -> {
    -            XContentParser.Token token = parser.currentToken();
    -            if (token == XContentParser.Token.START_OBJECT) {
    -                aggregation.keyed = true;
    -                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
    -                    aggregation.buckets.add(keyedBucketParser.apply(parser));
    -                }
    -            } else if (token == XContentParser.Token.START_ARRAY) {
    -                aggregation.keyed = false;
    -                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
    -                    aggregation.buckets.add(bucketParser.apply(parser));
    -                }
    -            }
    -        }, CommonFields.BUCKETS, ObjectParser.ValueType.OBJECT_ARRAY);
    -    }
    -
    -    public abstract static class ParsedBucket implements MultiBucketsAggregation.Bucket {
    -
    -        private Aggregations aggregations;
    -        private String keyAsString;
    -        private long docCount;
    -        private boolean keyed;
    -
    -        protected void setKeyAsString(String keyAsString) {
    -            this.keyAsString = keyAsString;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            return keyAsString;
    -        }
    -
    -        protected void setDocCount(long docCount) {
    -            this.docCount = docCount;
    -        }
    -
    -        @Override
    -        public long getDocCount() {
    -            return docCount;
    -        }
    -
    -        public void setKeyed(boolean keyed) {
    -            this.keyed = keyed;
    -        }
    -
    -        protected boolean isKeyed() {
    -            return keyed;
    -        }
    -
    -        protected void setAggregations(Aggregations aggregations) {
    -            this.aggregations = aggregations;
    -        }
    -
    -        @Override
    -        public Aggregations getAggregations() {
    -            return aggregations;
    -        }
    -
    -        @Override
    -        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            if (keyed) {
    -                // Subclasses can override the getKeyAsString method to handle specific cases like
    -                // keyed bucket with RAW doc value format where the key_as_string field is not printed
    -                // out but we still need to have a string version of the key to use as the bucket's name.
    -                builder.startObject(getKeyAsString());
    -            } else {
    -                builder.startObject();
    -            }
    -            if (keyAsString != null) {
    -                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
    -            }
    -            keyToXContent(builder);
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
    -            aggregations.toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
    -        }
    -
    -        protected static  B parseXContent(
    -            final XContentParser parser,
    -            final boolean keyed,
    -            final Supplier bucketSupplier,
    -            final CheckedBiConsumer keyConsumer
    -        ) throws IOException {
    -            final B bucket = bucketSupplier.get();
    -            bucket.setKeyed(keyed);
    -            XContentParser.Token token = parser.currentToken();
    -            String currentFieldName = parser.currentName();
    -            if (keyed) {
    -                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    -                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
    -            }
    -
    -            List aggregations = new ArrayList<>();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setKeyAsString(parser.text());
    -                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        keyConsumer.accept(parser, bucket);
    -                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setDocCount(parser.longValue());
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        keyConsumer.accept(parser, bucket);
    -                    } else {
    -                        XContentParserUtils.parseTypedKeysObject(
    -                            parser,
    -                            Aggregation.TYPED_KEYS_DELIMITER,
    -                            Aggregation.class,
    -                            aggregations::add
    -                        );
    -                    }
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java
    index 7c3c6f8397979..074688550fb44 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java
    @@ -40,7 +40,11 @@
      * this collector.
      */
     public class BestBucketsDeferringCollector extends DeferringBucketCollector {
    -    record Entry(AggregationExecutionContext aggCtx, PackedLongValues docDeltas, PackedLongValues buckets) {
    +    private static class Entry {
    +        AggregationExecutionContext aggCtx;
    +        PackedLongValues docDeltas;
    +        PackedLongValues buckets;
    +
             Entry(AggregationExecutionContext aggCtx, PackedLongValues docDeltas, PackedLongValues buckets) {
                 this.aggCtx = Objects.requireNonNull(aggCtx);
                 this.docDeltas = Objects.requireNonNull(docDeltas);
    @@ -200,6 +204,9 @@ public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
                     // collection was terminated prematurely
                     // continue with the following leaf
                 }
    +            // release resources
    +            entry.buckets = null;
    +            entry.docDeltas = null;
             }
             collector.postCollection();
         }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
    index ec8117cf03135..fc2e7f04f2c59 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
    @@ -34,6 +34,7 @@
     import java.util.Map;
     import java.util.function.BiConsumer;
     import java.util.function.Function;
    +import java.util.function.IntFunction;
     import java.util.function.LongUnaryOperator;
     import java.util.function.ToLongFunction;
     
    @@ -172,28 +173,27 @@ protected void prepareSubAggs(long[] ordsToCollect) throws IOException {}
          * @return the sub-aggregation results in the same order as the provided
          *         array of ordinals
          */
    -    protected final InternalAggregations[] buildSubAggsForBuckets(long[] bucketOrdsToCollect) throws IOException {
    +    protected final IntFunction<InternalAggregations> buildSubAggsForBuckets(long[] bucketOrdsToCollect) throws IOException {
             prepareSubAggs(bucketOrdsToCollect);
             InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][];
             for (int i = 0; i < subAggregators.length; i++) {
                 aggregations[i] = subAggregators[i].buildAggregations(bucketOrdsToCollect);
             }
    -        InternalAggregations[] result = new InternalAggregations[bucketOrdsToCollect.length];
    -        for (int ord = 0; ord < bucketOrdsToCollect.length; ord++) {
    -            final int thisOrd = ord;
    -            result[ord] = InternalAggregations.from(new AbstractList<>() {
    -                @Override
    -                public InternalAggregation get(int index) {
    -                    return aggregations[index][thisOrd];
    -                }
    +        return subAggsForBucketFunction(aggregations);
    +    }
     
    -                @Override
    -                public int size() {
    -                    return aggregations.length;
    -                }
    -            });
    -        }
    -        return result;
    +    private static IntFunction<InternalAggregations> subAggsForBucketFunction(InternalAggregation[][] aggregations) {
    +        return ord -> InternalAggregations.from(new AbstractList<>() {
    +            @Override
    +            public InternalAggregation get(int index) {
    +                return aggregations[index][ord];
    +            }
    +
    +            @Override
    +            public int size() {
    +                return aggregations.length;
    +            }
    +        });
         }
     
         /**
    @@ -221,11 +221,11 @@ protected final  void buildSubAggsForAllBuckets(
                     bucketOrdsToCollect[s++] = bucketToOrd.applyAsLong(bucket);
                 }
             }
    -        InternalAggregations[] results = buildSubAggsForBuckets(bucketOrdsToCollect);
    +        var results = buildSubAggsForBuckets(bucketOrdsToCollect);
             s = 0;
             for (B[] bucket : buckets) {
                 for (int b = 0; b < bucket.length; b++) {
    -                setAggs.accept(bucket[b], results[s++]);
    +                setAggs.accept(bucket[b], results.apply(s++));
                 }
             }
         }
    @@ -254,7 +254,7 @@ protected final  InternalAggregation[] buildAggregationsForFixedBucketCount(
                 }
             }
             bucketOrdIdx = 0;
    -        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
    +        var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
             InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
             for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) {
                 List buckets = new ArrayList<>(bucketsPerOwningBucketOrd);
    @@ -263,7 +263,7 @@ protected final  InternalAggregation[] buildAggregationsForFixedBucketCount(
                         bucketBuilder.build(
                             offsetInOwningOrd,
                             bucketDocCount(bucketOrdsToCollect[bucketOrdIdx]),
    -                        subAggregationResults[bucketOrdIdx++]
    +                        subAggregationResults.apply(bucketOrdIdx++)
                         )
                     );
                 }
    @@ -289,10 +289,10 @@ protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] ow
              * `consumeBucketsAndMaybeBreak(owningBucketOrds.length)`
              * here but we don't because single bucket aggs never have.
              */
    -        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(owningBucketOrds);
    +        var subAggregationResults = buildSubAggsForBuckets(owningBucketOrds);
             InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
             for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
    -            results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], subAggregationResults[ordIdx]);
    +            results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], subAggregationResults.apply(ordIdx));
             }
             return results;
         }
    @@ -336,7 +336,7 @@ protected final  InternalAggregation[] buildAggregationsForVariableBuckets(
                     bucketOrdsToCollect[b++] = ordsEnum.ord();
                 }
             }
    -        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
    +        var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
     
             InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
             b = 0;
    @@ -352,7 +352,7 @@ protected final  InternalAggregation[] buildAggregationsForVariableBuckets(
                             bucketOrdsToCollect[b]
                         );
                     }
    -                buckets.add(bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults[b++]));
    +                buckets.add(bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b++)));
                 }
                 results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], buckets);
             }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
    index 10c21d4b549d8..ba3e18b1406f9 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
    @@ -9,8 +9,8 @@
     package org.elasticsearch.search.aggregations.bucket;
     
     import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.HasAggregations;
    +import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.xcontent.ToXContent;
     
     import java.util.List;
    @@ -43,7 +43,7 @@ interface Bucket extends HasAggregations, ToXContent {
              * @return  The sub-aggregations of this bucket
              */
             @Override
    -        Aggregations getAggregations();
    +        InternalAggregations getAggregations();
     
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/ParsedSingleBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/ParsedSingleBucketAggregation.java
    deleted file mode 100644
    index 9f8891fc37b8e..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/ParsedSingleBucketAggregation.java
    +++ /dev/null
    @@ -1,87 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket;
    -
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.Collections;
    -import java.util.List;
    -
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    -
    -/**
    - * A base class for all the single bucket aggregations.
    - */
    -public abstract class ParsedSingleBucketAggregation extends ParsedAggregation implements SingleBucketAggregation {
    -
    -    private long docCount;
    -    protected Aggregations aggregations = new Aggregations(Collections.emptyList());
    -
    -    @Override
    -    public long getDocCount() {
    -        return docCount;
    -    }
    -
    -    protected void setDocCount(long docCount) {
    -        this.docCount = docCount;
    -    }
    -
    -    @Override
    -    public Aggregations getAggregations() {
    -        return aggregations;
    -    }
    -
    -    @Override
    -    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
    -        aggregations.toXContentInternal(builder, params);
    -        return builder;
    -    }
    -
    -    protected static  T parseXContent(final XContentParser parser, T aggregation, String name)
    -        throws IOException {
    -        aggregation.setName(name);
    -        XContentParser.Token token = parser.currentToken();
    -        String currentFieldName = parser.currentName();
    -        if (token == XContentParser.Token.FIELD_NAME) {
    -            token = parser.nextToken();
    -        }
    -        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
    -
    -        List aggregations = new ArrayList<>();
    -        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -            if (token == XContentParser.Token.FIELD_NAME) {
    -                currentFieldName = parser.currentName();
    -            } else if (token.isValue()) {
    -                if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                    aggregation.setDocCount(parser.longValue());
    -                }
    -            } else if (token == XContentParser.Token.START_OBJECT) {
    -                if (CommonFields.META.getPreferredName().equals(currentFieldName)) {
    -                    aggregation.metadata = parser.map();
    -                } else {
    -                    XContentParserUtils.parseTypedKeysObject(
    -                        parser,
    -                        Aggregation.TYPED_KEYS_DELIMITER,
    -                        Aggregation.class,
    -                        aggregations::add
    -                    );
    -                }
    -            }
    -        }
    -        aggregation.aggregations = new Aggregations(aggregations);
    -        return aggregation;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregation.java
    index 96c9184f5852b..b94cdcf28c669 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregation.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregation.java
    @@ -9,8 +9,8 @@
     package org.elasticsearch.search.aggregations.bucket;
     
     import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.HasAggregations;
    +import org.elasticsearch.search.aggregations.InternalAggregations;
     
     /**
      * A single bucket aggregation
    @@ -26,5 +26,5 @@ public interface SingleBucketAggregation extends Aggregation, HasAggregations {
          * @return  The sub-aggregations of this bucket
          */
         @Override
    -    Aggregations getAggregations();
    +    InternalAggregations getAggregations();
     }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
    index 0d088e9406175..e0189c3dd6651 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
    @@ -197,11 +197,11 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
             for (int i = 0; i < queue.size(); i++) {
                 bucketOrdsToCollect[i] = i;
             }
    -        InternalAggregations[] subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect);
    +        var subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect);
             while (queue.size() > 0) {
                 int slot = queue.pop();
                 CompositeKey key = queue.toCompositeKey(slot);
    -            InternalAggregations aggs = subAggsForBuckets[slot];
    +            InternalAggregations aggs = subAggsForBuckets.apply(slot);
                 long docCount = queue.getDocCount(slot);
                 buckets[queue.size()] = new InternalComposite.InternalBucket(
                     sourceNames,
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java
    index a302c912e312a..1ddf208a2a86e 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java
    @@ -14,7 +14,6 @@
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.search.DocValueFormat;
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
    @@ -441,7 +440,7 @@ public long getDocCount() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java
    deleted file mode 100644
    index 847e35cf0d4ea..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java
    +++ /dev/null
    @@ -1,109 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.composite;
    -
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.ParseField;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.List;
    -import java.util.Map;
    -
    -public class ParsedComposite extends ParsedMultiBucketAggregation implements CompositeAggregation {
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedComposite.class.getSimpleName(),
    -        true,
    -        ParsedComposite::new
    -    );
    -
    -    static {
    -        PARSER.declareField(
    -            ParsedComposite::setAfterKey,
    -            (p, c) -> p.mapOrdered(),
    -            new ParseField("after_key"),
    -            ObjectParser.ValueType.OBJECT
    -        );
    -        declareMultiBucketAggregationFields(PARSER, ParsedBucket::fromXContent, parser -> null);
    -    }
    -
    -    private Map afterKey;
    -
    -    public static ParsedComposite fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedComposite aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        if (aggregation.afterKey == null && aggregation.getBuckets().size() > 0) {
    -            /**
    -             * Previous versions (< 6.3) don't send afterKey
    -             * in the response so we set it as the last returned buckets.
    -             */
    -            aggregation.setAfterKey(aggregation.getBuckets().get(aggregation.getBuckets().size() - 1).key);
    -        }
    -        return aggregation;
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return CompositeAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    @Override
    -    public Map afterKey() {
    -        if (afterKey != null) {
    -            return afterKey;
    -        }
    -        return buckets.size() > 0 ? buckets.get(buckets.size() - 1).getKey() : null;
    -    }
    -
    -    private void setAfterKey(Map afterKey) {
    -        this.afterKey = afterKey;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        return CompositeAggregation.toXContentFragment(this, builder, params);
    -    }
    -
    -    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements CompositeAggregation.Bucket {
    -        private Map key;
    -
    -        @Override
    -        public String getKeyAsString() {
    -            return key.toString();
    -        }
    -
    -        @Override
    -        public Map getKey() {
    -            return key;
    -        }
    -
    -        void setKey(Map key) {
    -            this.key = key;
    -        }
    -
    -        @Override
    -        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            /**
    -             * See {@link CompositeAggregation#bucketToXContent}
    -             */
    -            throw new UnsupportedOperationException("not implemented");
    -        }
    -
    -        static ParsedComposite.ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseXContent(parser, false, ParsedBucket::new, (p, bucket) -> bucket.setKey(p.mapOrdered()));
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilter.java
    deleted file mode 100644
    index 32957cee586b4..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilter.java
    +++ /dev/null
    @@ -1,25 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.filter;
    -
    -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedFilter extends ParsedSingleBucketAggregation implements Filter {
    -
    -    @Override
    -    public String getType() {
    -        return FilterAggregationBuilder.NAME;
    -    }
    -
    -    public static ParsedFilter fromXContent(XContentParser parser, final String name) throws IOException {
    -        return parseXContent(parser, new ParsedFilter(), name);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilters.java
    deleted file mode 100644
    index 149a5475aefac..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilters.java
    +++ /dev/null
    @@ -1,148 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.filter;
    -
    -import org.elasticsearch.common.util.Maps;
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.List;
    -import java.util.Map;
    -
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    -
    -public class ParsedFilters extends ParsedMultiBucketAggregation implements Filters {
    -
    -    private Map bucketMap;
    -
    -    @Override
    -    public String getType() {
    -        return FiltersAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    @Override
    -    public ParsedBucket getBucketByKey(String key) {
    -        if (bucketMap == null) {
    -            bucketMap = Maps.newMapWithExpectedSize(buckets.size());
    -            for (ParsedBucket bucket : buckets) {
    -                bucketMap.put(bucket.getKey(), bucket);
    -            }
    -        }
    -        return bucketMap.get(key);
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedFilters.class.getSimpleName(),
    -        true,
    -        ParsedFilters::new
    -    );
    -    static {
    -        declareMultiBucketAggregationFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedFilters fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedFilters aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        // in case this is not a keyed aggregation, we need to add numeric keys to the buckets
    -        if (aggregation.keyed == false) {
    -            int i = 0;
    -            for (ParsedBucket bucket : aggregation.buckets) {
    -                if (bucket.key == null) {
    -                    bucket.key = String.valueOf(i);
    -                    i++;
    -                }
    -            }
    -        }
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Filters.Bucket {
    -
    -        private String key;
    -
    -        private boolean keyedBucket = true;
    -
    -        @Override
    -        public String getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            return key;
    -        }
    -
    -        @Override
    -        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            if (isKeyed()) {
    -                builder.startObject(key);
    -            } else {
    -                builder.startObject();
    -            }
    -            if (isKeyed() == false && keyedBucket == false) {
    -                builder.field(CommonFields.KEY.getPreferredName(), key);
    -            }
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
    -            getAggregations().toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
    -            final ParsedBucket bucket = new ParsedBucket();
    -            bucket.setKeyed(keyed);
    -            XContentParser.Token token = parser.currentToken();
    -            String currentFieldName = parser.currentName();
    -            if (keyed) {
    -                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    -                bucket.key = currentFieldName;
    -                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
    -            }
    -
    -            List aggregations = new ArrayList<>();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setDocCount(parser.longValue());
    -                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        bucket.key = parser.text();
    -                        bucket.keyedBucket = false;
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    XContentParserUtils.parseTypedKeysObject(
    -                        parser,
    -                        Aggregation.TYPED_KEYS_DELIMITER,
    -                        Aggregation.class,
    -                        aggregations::add
    -                    );
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
    index 126528ef533fc..130db7f27aedb 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
    @@ -10,7 +10,6 @@
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
     import org.elasticsearch.xcontent.XContentBuilder;
    @@ -61,7 +60,7 @@ public long getDocCount() {
         }
     
         @Override
    -    public Aggregations getAggregations() {
    +    public InternalAggregations getAggregations() {
             return aggregations;
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java
    deleted file mode 100644
    index c7a0f5b184a92..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java
    +++ /dev/null
    @@ -1,40 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.geogrid;
    -
    -import org.elasticsearch.core.CheckedFunction;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.List;
    -import java.util.function.Supplier;
    -
    -public abstract class ParsedGeoGrid extends ParsedMultiBucketAggregation implements GeoGrid {
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    public static ObjectParser createParser(
    -        Supplier supplier,
    -        CheckedFunction bucketParser,
    -        CheckedFunction keyedBucketParser
    -    ) {
    -        ObjectParser parser = new ObjectParser<>(ParsedGeoGrid.class.getSimpleName(), true, supplier);
    -        declareMultiBucketAggregationFields(parser, bucketParser, keyedBucketParser);
    -        return parser;
    -    }
    -
    -    public void setName(String name) {
    -        super.setName(name);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java
    deleted file mode 100644
    index 97e126c003170..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java
    +++ /dev/null
    @@ -1,24 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.geogrid;
    -
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -
    -import java.io.IOException;
    -
    -public abstract class ParsedGeoGridBucket extends ParsedMultiBucketAggregation.ParsedBucket implements GeoGrid.Bucket {
    -
    -    protected String hashAsString;
    -
    -    @Override
    -    protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -        return builder.field(Aggregation.CommonFields.KEY.getPreferredName(), hashAsString);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java
    deleted file mode 100644
    index ada6fa1e73c4a..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java
    +++ /dev/null
    @@ -1,34 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.geogrid;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedGeoHashGrid extends ParsedGeoGrid {
    -
    -    private static final ObjectParser PARSER = createParser(
    -        ParsedGeoHashGrid::new,
    -        ParsedGeoHashGridBucket::fromXContent,
    -        ParsedGeoHashGridBucket::fromXContent
    -    );
    -
    -    public static ParsedGeoGrid fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedGeoGrid aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return GeoHashGridAggregationBuilder.NAME;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java
    deleted file mode 100644
    index c709099b56520..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java
    +++ /dev/null
    @@ -1,30 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.geogrid;
    -
    -import org.elasticsearch.common.geo.GeoPoint;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedGeoHashGridBucket extends ParsedGeoGridBucket {
    -
    -    @Override
    -    public GeoPoint getKey() {
    -        return GeoPoint.fromGeohash(hashAsString);
    -    }
    -
    -    @Override
    -    public String getKeyAsString() {
    -        return hashAsString;
    -    }
    -
    -    static ParsedGeoHashGridBucket fromXContent(XContentParser parser) throws IOException {
    -        return parseXContent(parser, false, ParsedGeoHashGridBucket::new, (p, bucket) -> bucket.hashAsString = p.textOrNull());
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java
    deleted file mode 100644
    index 88fa9954f3281..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java
    +++ /dev/null
    @@ -1,34 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.geogrid;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedGeoTileGrid extends ParsedGeoGrid {
    -
    -    private static final ObjectParser PARSER = createParser(
    -        ParsedGeoTileGrid::new,
    -        ParsedGeoTileGridBucket::fromXContent,
    -        ParsedGeoTileGridBucket::fromXContent
    -    );
    -
    -    public static ParsedGeoGrid fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedGeoGrid aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return GeoTileGridAggregationBuilder.NAME;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java
    deleted file mode 100644
    index e39252bc77118..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java
    +++ /dev/null
    @@ -1,31 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.geogrid;
    -
    -import org.elasticsearch.common.geo.GeoPoint;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedGeoTileGridBucket extends ParsedGeoGridBucket {
    -
    -    @Override
    -    public GeoPoint getKey() {
    -        return GeoTileUtils.keyToGeoPoint(hashAsString);
    -    }
    -
    -    @Override
    -    public String getKeyAsString() {
    -        return hashAsString;
    -    }
    -
    -    static ParsedGeoTileGridBucket fromXContent(XContentParser parser) throws IOException {
    -        return parseXContent(parser, false, ParsedGeoTileGridBucket::new, (p, bucket) -> bucket.hashAsString = p.text());
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/ParsedGlobal.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/ParsedGlobal.java
    deleted file mode 100644
    index 7d94351ccc5a0..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/ParsedGlobal.java
    +++ /dev/null
    @@ -1,25 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.global;
    -
    -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedGlobal extends ParsedSingleBucketAggregation implements Global {
    -
    -    @Override
    -    public String getType() {
    -        return GlobalAggregationBuilder.NAME;
    -    }
    -
    -    public static ParsedGlobal fromXContent(XContentParser parser, final String name) throws IOException {
    -        return parseXContent(parser, new ParsedGlobal(), name);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
    index 4f94e2061caa1..c164067ea6504 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
    @@ -14,6 +14,7 @@
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.core.TimeValue;
    +import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper;
     import org.elasticsearch.search.aggregations.AggregationBuilder;
     import org.elasticsearch.search.aggregations.AggregatorFactories;
     import org.elasticsearch.search.aggregations.AggregatorFactory;
    @@ -36,6 +37,7 @@
     import java.util.List;
     import java.util.Map;
     import java.util.Objects;
    +import java.util.SimpleTimeZone;
     import java.util.function.Consumer;
     
     import static java.util.Map.entry;
    @@ -406,23 +408,46 @@ protected ValuesSourceAggregatorFactory innerBuild(
         ) throws IOException {
             final DateIntervalWrapper.IntervalTypeEnum dateHistogramIntervalType = dateHistogramInterval.getIntervalType();
     
    -        if (context.getIndexSettings().getIndexMetadata().isDownsampledIndex()
    -            && DateIntervalWrapper.IntervalTypeEnum.CALENDAR.equals(dateHistogramIntervalType)) {
    -            throw new IllegalArgumentException(
    -                config.getDescription()
    -                    + " is not supported for aggregation ["
    -                    + getName()
    -                    + "] with interval type ["
    -                    + dateHistogramIntervalType.getPreferredName()
    -                    + "]"
    -            );
    -        }
    -
    +        boolean downsampledResultsOffset = false;
             final ZoneId tz = timeZone();
    -        if (context.getIndexSettings().getIndexMetadata().isDownsampledIndex() && tz != null && ZoneId.of("UTC").equals(tz) == false) {
    -            throw new IllegalArgumentException(
    -                config.getDescription() + " is not supported for aggregation [" + getName() + "] with timezone [" + tz + "]"
    -            );
    +
    +        String downsamplingInterval = context.getIndexSettings().getIndexMetadata().getDownsamplingInterval();
    +        if (downsamplingInterval != null) {
    +            if (DateIntervalWrapper.IntervalTypeEnum.CALENDAR.equals(dateHistogramIntervalType)) {
    +                throw new IllegalArgumentException(
    +                    config.getDescription()
    +                        + " is not supported for aggregation ["
    +                        + getName()
    +                        + "] with interval type ["
    +                        + dateHistogramIntervalType.getPreferredName()
    +                        + "]"
    +                );
    +            }
    +
    +            // Downsampled data in time-series indexes contain aggregated values that get calculated over UTC-based intervals.
    +            // When they get aggregated using a different timezone, the resulting buckets may need to be offset to account for
    +            // the difference between UTC (where stored data refers to) and the requested timezone. For instance:
    +            // a. A TZ shifted by -01:15 over hourly downsampled data will lead to buckets with times XX:45, instead of XX:00
    +            // b. A TZ shifted by +07:00 over daily downsampled data will lead to buckets with times 07:00, instead of 00:00
    +            // c. Intervals over DST are approximate, not including gaps in time buckets. This applies to date histogram aggregation in
    +            // general.
    +            if (tz != null && ZoneId.of("UTC").equals(tz) == false && field().equals(DataStreamTimestampFieldMapper.DEFAULT_PATH)) {
    +
    +                // Get the downsampling interval.
    +                DateHistogramInterval interval = new DateHistogramInterval(downsamplingInterval);
    +                long downsamplingResolution = interval.estimateMillis();
    +                long aggregationResolution = dateHistogramInterval.getAsFixedInterval().estimateMillis();
    +
    +                // If the aggregation resolution is not a multiple of the downsampling resolution, the reported time for each
    +                // bucket needs to be shifted by the mod - in addition to rounding that's applied as usual.
    +                // Note that the aggregation resolution gets shifted to match the specified timezone. Timezone.getOffset() normally expects
    +                // a date but it can also process an offset (interval) in milliseconds as it uses the Unix epoch for reference.
    +                long aggregationOffset = SimpleTimeZone.getTimeZone(tz).getOffset(aggregationResolution) % downsamplingResolution;
    +                if (aggregationOffset != 0) {
    +                    downsampledResultsOffset = true;
    +                    offset += aggregationOffset;
    +                }
    +            }
             }
     
             DateHistogramAggregationSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config);
    @@ -473,6 +498,7 @@ protected ValuesSourceAggregatorFactory innerBuild(
                 order,
                 keyed,
                 minDocCount,
    +            downsampledResultsOffset,
                 rounding,
                 roundedBounds,
                 roundedHardBounds,
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java
    index 1529d0fab6cc9..b3f002e8b83a7 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java
    @@ -29,6 +29,7 @@ Aggregator build(
             BucketOrder order,
             boolean keyed,
             long minDocCount,
    +        boolean downsampledResultsOffset,
             @Nullable LongBounds extendedBounds,
             @Nullable LongBounds hardBounds,
             ValuesSourceConfig valuesSourceConfig,
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
    index ea08d5960d704..8f5323dfc9d2b 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
    @@ -79,6 +79,7 @@ public static Aggregator build(
             BucketOrder order,
             boolean keyed,
             long minDocCount,
    +        boolean downsampledResultsOffset,
             @Nullable LongBounds extendedBounds,
             @Nullable LongBounds hardBounds,
             ValuesSourceConfig valuesSourceConfig,
    @@ -96,6 +97,7 @@ public static Aggregator build(
                 order,
                 keyed,
                 minDocCount,
    +            downsampledResultsOffset,
                 extendedBounds,
                 hardBounds,
                 valuesSourceConfig,
    @@ -115,6 +117,7 @@ public static Aggregator build(
                 order,
                 keyed,
                 minDocCount,
    +            downsampledResultsOffset,
                 extendedBounds,
                 hardBounds,
                 valuesSourceConfig,
    @@ -133,6 +136,7 @@ private static FromDateRange adaptIntoRangeOrNull(
             BucketOrder order,
             boolean keyed,
             long minDocCount,
    +        boolean downsampledResultsOffset,
             @Nullable LongBounds extendedBounds,
             @Nullable LongBounds hardBounds,
             ValuesSourceConfig valuesSourceConfig,
    @@ -191,6 +195,7 @@ private static FromDateRange adaptIntoRangeOrNull(
                 minDocCount,
                 extendedBounds,
                 keyed,
    +            downsampledResultsOffset,
                 fixedRoundingPoints
             );
         }
    @@ -227,6 +232,7 @@ private static RangeAggregator.Range[] ranges(LongBounds hardBounds, long[] fixe
         private final boolean keyed;
     
         private final long minDocCount;
    +    private final boolean downsampledResultsOffset;
         private final LongBounds extendedBounds;
         private final LongBounds hardBounds;
     
    @@ -240,6 +246,7 @@ private static RangeAggregator.Range[] ranges(LongBounds hardBounds, long[] fixe
             BucketOrder order,
             boolean keyed,
             long minDocCount,
    +        boolean downsampledResultsOffset,
             @Nullable LongBounds extendedBounds,
             @Nullable LongBounds hardBounds,
             ValuesSourceConfig valuesSourceConfig,
    @@ -255,6 +262,7 @@ private static RangeAggregator.Range[] ranges(LongBounds hardBounds, long[] fixe
             order.validate(this);
             this.keyed = keyed;
             this.minDocCount = minDocCount;
    +        this.downsampledResultsOffset = downsampledResultsOffset;
             this.extendedBounds = extendedBounds;
             this.hardBounds = hardBounds;
             // TODO: Stop using null here
    @@ -328,6 +336,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
                     emptyBucketInfo,
                     formatter,
                     keyed,
    +                downsampledResultsOffset,
                     metadata()
                 );
             });
    @@ -347,6 +356,7 @@ public InternalAggregation buildEmptyAggregation() {
                 emptyBucketInfo,
                 formatter,
                 keyed,
    +            downsampledResultsOffset,
                 metadata()
             );
         }
    @@ -392,6 +402,7 @@ static class FromDateRange extends AdaptingAggregator implements SizedBucketAggr
             private final long minDocCount;
             private final LongBounds extendedBounds;
             private final boolean keyed;
    +        private final boolean downsampledResultsOffset;
             private final long[] fixedRoundingPoints;
     
             FromDateRange(
    @@ -405,6 +416,7 @@ static class FromDateRange extends AdaptingAggregator implements SizedBucketAggr
                 long minDocCount,
                 LongBounds extendedBounds,
                 boolean keyed,
    +            boolean downsampledResultsOffset,
                 long[] fixedRoundingPoints
             ) throws IOException {
                 super(parent, subAggregators, delegate);
    @@ -416,6 +428,7 @@ static class FromDateRange extends AdaptingAggregator implements SizedBucketAggr
                 this.minDocCount = minDocCount;
                 this.extendedBounds = extendedBounds;
                 this.keyed = keyed;
    +            this.downsampledResultsOffset = downsampledResultsOffset;
                 this.fixedRoundingPoints = fixedRoundingPoints;
             }
     
    @@ -454,6 +467,7 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) {
                     emptyBucketInfo,
                     format,
                     keyed,
    +                downsampledResultsOffset,
                     range.getMetadata()
                 );
             }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
    index 1a75766c40a6b..bb12f4588ef80 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
    @@ -52,6 +52,7 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
                     order,
                     keyed,
                     minDocCount,
    +                downsampledResultsOffset,
                     extendedBounds,
                     hardBounds,
                     valuesSourceConfig,
    @@ -71,6 +72,7 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
                         order,
                         keyed,
                         minDocCount,
    +                    downsampledResultsOffset,
                         extendedBounds,
                         hardBounds,
                         valuesSourceConfig,
    @@ -88,6 +90,7 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
         private final BucketOrder order;
         private final boolean keyed;
         private final long minDocCount;
    +    private final boolean downsampledResultsOffset;
         private final LongBounds extendedBounds;
         private final LongBounds hardBounds;
         private final Rounding rounding;
    @@ -98,6 +101,7 @@ public DateHistogramAggregatorFactory(
             BucketOrder order,
             boolean keyed,
             long minDocCount,
    +        boolean downsampledResultsOffset,
             Rounding rounding,
             LongBounds extendedBounds,
             LongBounds hardBounds,
    @@ -111,6 +115,7 @@ public DateHistogramAggregatorFactory(
             this.aggregatorSupplier = aggregationSupplier;
             this.order = order;
             this.keyed = keyed;
    +        this.downsampledResultsOffset = downsampledResultsOffset;
             this.minDocCount = minDocCount;
             this.extendedBounds = extendedBounds;
             this.hardBounds = hardBounds;
    @@ -139,6 +144,7 @@ protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound c
                 order,
                 keyed,
                 minDocCount,
    +            downsampledResultsOffset,
                 extendedBounds,
                 hardBounds,
                 config,
    @@ -159,6 +165,7 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad
                 order,
                 keyed,
                 minDocCount,
    +            downsampledResultsOffset,
                 extendedBounds,
                 hardBounds,
                 config,
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
    index 34720f3f2f643..5fe44aa694cc5 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
    @@ -59,6 +59,7 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
         private final boolean keyed;
     
         private final long minDocCount;
    +    private final boolean downsampledResultsOffset;
         private final LongBounds extendedBounds;
         private final LongBounds hardBounds;
     
    @@ -71,6 +72,7 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
             BucketOrder order,
             boolean keyed,
             long minDocCount,
    +        boolean downsampledResultsOffset,
             @Nullable LongBounds extendedBounds,
             @Nullable LongBounds hardBounds,
             ValuesSourceConfig valuesSourceConfig,
    @@ -87,6 +89,7 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
             order.validate(this);
             this.keyed = keyed;
             this.minDocCount = minDocCount;
    +        this.downsampledResultsOffset = downsampledResultsOffset;
             this.extendedBounds = extendedBounds;
             this.hardBounds = hardBounds;
             // TODO: Stop using null here
    @@ -197,6 +200,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
                         emptyBucketInfo,
                         formatter,
                         keyed,
    +                    downsampledResultsOffset,
                         metadata()
                     );
                 }
    @@ -217,6 +221,7 @@ public InternalAggregation buildEmptyAggregation() {
                 emptyBucketInfo,
                 formatter,
                 keyed,
    +            downsampledResultsOffset,
                 metadata()
             );
         }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
    index 4ffc9abdc2202..8a7561aaab574 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
    @@ -9,12 +9,13 @@
     
     import org.apache.lucene.util.CollectionUtil;
     import org.apache.lucene.util.PriorityQueue;
    +import org.elasticsearch.TransportVersion;
    +import org.elasticsearch.TransportVersions;
     import org.elasticsearch.common.Rounding;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.search.DocValueFormat;
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.BucketOrder;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
    @@ -111,7 +112,7 @@ public long getDocCount() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    @@ -202,6 +203,7 @@ public int hashCode() {
         private final BucketOrder order;
         private final DocValueFormat format;
         private final boolean keyed;
    +    private final boolean downsampledResultsOffset;
         private final long minDocCount;
         private final long offset;
         final EmptyBucketInfo emptyBucketInfo;
    @@ -215,6 +217,7 @@ public int hashCode() {
             EmptyBucketInfo emptyBucketInfo,
             DocValueFormat formatter,
             boolean keyed,
    +        boolean downsampledResultsOffset,
             Map metadata
         ) {
             super(name, metadata);
    @@ -226,6 +229,15 @@ public int hashCode() {
             this.emptyBucketInfo = emptyBucketInfo;
             this.format = formatter;
             this.keyed = keyed;
    +        this.downsampledResultsOffset = downsampledResultsOffset;
    +    }
    +
    +    boolean versionSupportsDownsamplingTimezone(TransportVersion version) {
    +        return version.onOrAfter(TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ)
    +            || version.between(
    +                TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH,
    +                TransportVersions.NODE_STATS_REQUEST_SIMPLIFIED
    +            );
         }
     
         /**
    @@ -243,6 +255,11 @@ public InternalDateHistogram(StreamInput in) throws IOException {
             offset = in.readLong();
             format = in.readNamedWriteable(DocValueFormat.class);
             keyed = in.readBoolean();
    +        if (versionSupportsDownsamplingTimezone(in.getTransportVersion())) {
    +            downsampledResultsOffset = in.readBoolean();
    +        } else {
    +            downsampledResultsOffset = false;
    +        }
             buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format));
         }
     
    @@ -256,6 +273,9 @@ protected void doWriteTo(StreamOutput out) throws IOException {
             out.writeLong(offset);
             out.writeNamedWriteable(format);
             out.writeBoolean(keyed);
    +        if (versionSupportsDownsamplingTimezone(out.getTransportVersion())) {
    +            out.writeBoolean(downsampledResultsOffset);
    +        }
             out.writeCollection(buckets);
         }
     
    @@ -283,7 +303,18 @@ BucketOrder getOrder() {
     
         @Override
         public InternalDateHistogram create(List buckets) {
    -        return new InternalDateHistogram(name, buckets, order, minDocCount, offset, emptyBucketInfo, format, keyed, metadata);
    +        return new InternalDateHistogram(
    +            name,
    +            buckets,
    +            order,
    +            minDocCount,
    +            offset,
    +            emptyBucketInfo,
    +            format,
    +            keyed,
    +            downsampledResultsOffset,
    +            metadata
    +        );
         }
     
         @Override
    @@ -367,7 +398,7 @@ protected Bucket reduceBucket(List buckets, AggregationReduceContext con
             long docCount = 0;
             for (Bucket bucket : buckets) {
                 docCount += bucket.docCount;
    -            aggregations.add((InternalAggregations) bucket.getAggregations());
    +            aggregations.add(bucket.getAggregations());
             }
             InternalAggregations aggs = InternalAggregations.reduce(aggregations, context);
             return createBucket(buckets.get(0).key, docCount, aggs);
    @@ -508,6 +539,7 @@ public InternalAggregation reduce(List aggregations, Aggreg
                 emptyBucketInfo,
                 format,
                 keyed,
    +            downsampledResultsOffset,
                 getMetadata()
             );
         }
    @@ -523,6 +555,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
                 emptyBucketInfo,
                 format,
                 keyed,
    +            downsampledResultsOffset,
                 getMetadata()
             );
         }
    @@ -542,6 +575,12 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             } else {
                 builder.endArray();
             }
    +        if (downsampledResultsOffset) {
    +            // Indicates that the dates reported in the buckets over downsampled indexes are offset
    +            // to match the intervals at UTC, since downsampling always uses UTC-based intervals
    +            // to calculate aggregated values.
    +            builder.field("downsampled_results_offset", Boolean.TRUE);
    +        }
             return builder;
         }
     
    @@ -570,7 +609,18 @@ public InternalAggregation createAggregation(List
    +public class InternalHistogram extends InternalMultiBucketAggregation
         implements
             Histogram,
             HistogramFactory {
    @@ -107,7 +106,7 @@ public long getDocCount() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    @@ -354,7 +353,7 @@ protected Bucket reduceBucket(List buckets, AggregationReduceContext con
             long docCount = 0;
             for (Bucket bucket : buckets) {
                 docCount += bucket.docCount;
    -            aggregations.add((InternalAggregations) bucket.getAggregations());
    +            aggregations.add(bucket.getAggregations());
             }
             InternalAggregations aggs = InternalAggregations.reduce(aggregations, context);
             return createBucket(buckets.get(0).key, docCount, aggs);
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java
    index 5686c0ea11dfa..59bb251368c2e 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java
    @@ -13,7 +13,6 @@
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.search.DocValueFormat;
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
    @@ -152,7 +151,7 @@ public long getDocCount() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    @@ -318,7 +317,7 @@ protected Bucket reduceBucket(List buckets, AggregationReduceContext con
                 min = Math.min(min, bucket.bounds.min);
                 max = Math.max(max, bucket.bounds.max);
                 sum += bucket.docCount * bucket.centroid;
    -            aggregations.add((InternalAggregations) bucket.getAggregations());
    +            aggregations.add(bucket.getAggregations());
             }
             InternalAggregations aggs = InternalAggregations.reduce(aggregations, context);
             double centroid = sum / docCount;
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
    deleted file mode 100644
    index 5445bc2126277..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
    +++ /dev/null
    @@ -1,85 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.histogram;
    -
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.time.Instant;
    -import java.time.ZoneOffset;
    -import java.util.List;
    -
    -public class ParsedDateHistogram extends ParsedMultiBucketAggregation implements Histogram {
    -
    -    @Override
    -    public String getType() {
    -        return DateHistogramAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedDateHistogram.class.getSimpleName(),
    -        true,
    -        ParsedDateHistogram::new
    -    );
    -    static {
    -        declareMultiBucketAggregationFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedDateHistogram fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedDateHistogram aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {
    -
    -        private Long key;
    -
    -        @Override
    -        public Object getKey() {
    -            if (key != null) {
    -                return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
    -            }
    -            return null;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return Long.toString(key);
    -            }
    -            return null;
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            return builder.field(CommonFields.KEY.getPreferredName(), key);
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
    -            return parseXContent(parser, keyed, ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue());
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedHistogram.java
    deleted file mode 100644
    index a0ebcb0e6d207..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedHistogram.java
    +++ /dev/null
    @@ -1,74 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.histogram;
    -
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.List;
    -
    -public class ParsedHistogram extends ParsedMultiBucketAggregation implements Histogram {
    -
    -    @Override
    -    public String getType() {
    -        return HistogramAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedHistogram.class.getSimpleName(),
    -        true,
    -        ParsedHistogram::new
    -    );
    -    static {
    -        declareMultiBucketAggregationFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedHistogram fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedHistogram aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {
    -
    -        private Double key;
    -
    -        @Override
    -        public Object getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return Double.toString(key);
    -            }
    -            return null;
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
    -            return parseXContent(parser, keyed, ParsedBucket::new, (p, bucket) -> bucket.key = p.doubleValue());
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedVariableWidthHistogram.java
    deleted file mode 100644
    index de7f29d785c75..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedVariableWidthHistogram.java
    +++ /dev/null
    @@ -1,184 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.histogram;
    -
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.List;
    -
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    -
    -public class ParsedVariableWidthHistogram extends ParsedMultiBucketAggregation
    -    implements
    -        Histogram {
    -
    -    @Override
    -    public String getType() {
    -        return VariableWidthHistogramAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedVariableWidthHistogram.class.getSimpleName(),
    -        true,
    -        ParsedVariableWidthHistogram::new
    -    );
    -    static {
    -        declareMultiBucketAggregationFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedVariableWidthHistogram fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedVariableWidthHistogram aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {
    -        private Double key;
    -
    -        private Double min;
    -        private Double max;
    -
    -        private String minAsString;
    -        private String maxAsString;
    -
    -        @Override
    -        public Object getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return Double.toString(key);
    -            }
    -            return null;
    -        }
    -
    -        public void setMin(Double min) {
    -            this.min = min;
    -        }
    -
    -        public void setMinAsString(String minAsString) {
    -            this.minAsString = minAsString;
    -        }
    -
    -        public double getMin() {
    -            return min;
    -        }
    -
    -        public void setMax(Double max) {
    -            this.max = max;
    -        }
    -
    -        public void setMaxAsString(String maxAsString) {
    -            this.maxAsString = maxAsString;
    -        }
    -
    -        public double getMax() {
    -            return max;
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
    -            final ParsedBucket bucket = new ParsedBucket();
    -            bucket.setKeyed(keyed);
    -            XContentParser.Token token = parser.currentToken();
    -            String currentFieldName = parser.currentName();
    -            if (keyed) {
    -                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    -                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
    -            }
    -
    -            List aggregations = new ArrayList<>();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setKeyAsString(parser.text());
    -                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        bucket.key = parser.doubleValue();
    -                    } else if (CommonFields.MIN_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setMinAsString(parser.text());
    -                    } else if (CommonFields.MIN.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setMin(parser.doubleValue());
    -                    } else if (CommonFields.MAX_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setMaxAsString(parser.text());
    -                    } else if (CommonFields.MAX.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setMax(parser.doubleValue());
    -                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setDocCount(parser.longValue());
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        bucket.key = parser.doubleValue();
    -                    } else {
    -                        XContentParserUtils.parseTypedKeysObject(
    -                            parser,
    -                            Aggregation.TYPED_KEYS_DELIMITER,
    -                            Aggregation.class,
    -                            aggregations::add
    -                        );
    -                    }
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -
    -        @Override
    -        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            if (isKeyed()) {
    -                builder.startObject(getKeyAsString());
    -            } else {
    -                builder.startObject();
    -            }
    -
    -            if (minAsString != null) {
    -                builder.field(CommonFields.MIN_AS_STRING.getPreferredName(), minAsString);
    -            }
    -            builder.field(CommonFields.MIN.getPreferredName(), getMin());
    -
    -            if (super.getKeyAsString() != null) {
    -                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
    -            }
    -            keyToXContent(builder);
    -
    -            if (maxAsString != null) {
    -                builder.field(CommonFields.MAX_AS_STRING.getPreferredName(), maxAsString);
    -            }
    -            builder.field(CommonFields.MAX.getPreferredName(), getMax());
    -
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
    -            getAggregations().toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
    index 945ecd7424de3..55063c0af4010 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
    @@ -555,11 +555,11 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
                 bucketOrdsToCollect[i] = i;
             }
     
    -        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
    +        var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
     
             List buckets = new ArrayList<>(numClusters);
             for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) {
    -            buckets.add(collector.buildBucket(bucketOrd, subAggregationResults[bucketOrd]));
    +            buckets.add(collector.buildBucket(bucketOrd, subAggregationResults.apply(bucketOrd)));
             }
     
             Function, InternalAggregation> resultBuilder = bucketsToFormat -> {
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/ParsedMissing.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/ParsedMissing.java
    deleted file mode 100644
    index f5065be55eb75..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/ParsedMissing.java
    +++ /dev/null
    @@ -1,25 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.missing;
    -
    -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedMissing extends ParsedSingleBucketAggregation implements Missing {
    -
    -    @Override
    -    public String getType() {
    -        return MissingAggregationBuilder.NAME;
    -    }
    -
    -    public static ParsedMissing fromXContent(XContentParser parser, final String name) throws IOException {
    -        return parseXContent(parser, new ParsedMissing(), name);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ParsedNested.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ParsedNested.java
    deleted file mode 100644
    index fadd915720a92..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ParsedNested.java
    +++ /dev/null
    @@ -1,25 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.nested;
    -
    -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedNested extends ParsedSingleBucketAggregation implements Nested {
    -
    -    @Override
    -    public String getType() {
    -        return NestedAggregationBuilder.NAME;
    -    }
    -
    -    public static ParsedNested fromXContent(XContentParser parser, final String name) throws IOException {
    -        return parseXContent(parser, new ParsedNested(), name);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ParsedReverseNested.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ParsedReverseNested.java
    deleted file mode 100644
    index f356d2d431259..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ParsedReverseNested.java
    +++ /dev/null
    @@ -1,25 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.nested;
    -
    -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedReverseNested extends ParsedSingleBucketAggregation implements ReverseNested {
    -
    -    @Override
    -    public String getType() {
    -        return ReverseNestedAggregationBuilder.NAME;
    -    }
    -
    -    public static ParsedReverseNested fromXContent(XContentParser parser, final String name) throws IOException {
    -        return parseXContent(parser, new ParsedReverseNested(), name);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
    index eb8b0f95047b9..ec95052f5c3f5 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
    @@ -19,7 +19,6 @@
     import org.elasticsearch.search.aggregations.BucketOrder;
     import org.elasticsearch.search.aggregations.CardinalityUpperBound;
     import org.elasticsearch.search.aggregations.InternalAggregation;
    -import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.aggregations.LeafBucketCollector;
     import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
     import org.elasticsearch.search.aggregations.NonCollectingAggregator;
    @@ -170,7 +169,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
                 }
             }
     
    -        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
    +        var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
             InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
             b = 0;
             for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
    @@ -193,7 +192,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
                             ipPrefix.prefixLength,
                             ipPrefix.appendPrefixLength,
                             docCount,
    -                        subAggregationResults[b++]
    +                        subAggregationResults.apply(b++)
                         )
                     );
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java
    index c8588136c1d33..131be36db2956 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java
    @@ -14,7 +14,6 @@
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.search.DocValueFormat;
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
    @@ -117,7 +116,7 @@ public long getDocCount() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedBinaryRange.java
    deleted file mode 100644
    index db9a7301db790..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedBinaryRange.java
    +++ /dev/null
    @@ -1,151 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.range;
    -
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.List;
    -
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    -
    -public class ParsedBinaryRange extends ParsedMultiBucketAggregation implements Range {
    -
    -    @Override
    -    public String getType() {
    -        return IpRangeAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedBinaryRange.class.getSimpleName(),
    -        true,
    -        ParsedBinaryRange::new
    -    );
    -    static {
    -        declareMultiBucketAggregationFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedBinaryRange fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedBinaryRange aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Range.Bucket {
    -
    -        private String key;
    -        private String from;
    -        private String to;
    -
    -        @Override
    -        public Object getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            return key;
    -        }
    -
    -        @Override
    -        public Object getFrom() {
    -            return from;
    -        }
    -
    -        @Override
    -        public String getFromAsString() {
    -            return from;
    -        }
    -
    -        @Override
    -        public Object getTo() {
    -            return to;
    -        }
    -
    -        @Override
    -        public String getToAsString() {
    -            return to;
    -        }
    -
    -        @Override
    -        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            if (isKeyed()) {
    -                builder.startObject(key);
    -            } else {
    -                builder.startObject();
    -                builder.field(CommonFields.KEY.getPreferredName(), key);
    -            }
    -            if (from != null) {
    -                builder.field(CommonFields.FROM.getPreferredName(), from);
    -            }
    -            if (to != null) {
    -                builder.field(CommonFields.TO.getPreferredName(), to);
    -            }
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
    -            getAggregations().toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -
    -        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
    -            final ParsedBucket bucket = new ParsedBucket();
    -            bucket.setKeyed(keyed);
    -            XContentParser.Token token = parser.currentToken();
    -            String currentFieldName = parser.currentName();
    -
    -            if (keyed) {
    -                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    -                bucket.key = currentFieldName;
    -                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
    -            }
    -
    -            List aggregations = new ArrayList<>();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        bucket.key = parser.text();
    -                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setDocCount(parser.longValue());
    -                    } else if (CommonFields.FROM.getPreferredName().equals(currentFieldName)) {
    -                        bucket.from = parser.text();
    -                    } else if (CommonFields.TO.getPreferredName().equals(currentFieldName)) {
    -                        bucket.to = parser.text();
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    XContentParserUtils.parseTypedKeysObject(
    -                        parser,
    -                        Aggregation.TYPED_KEYS_DELIMITER,
    -                        Aggregation.class,
    -                        aggregations::add
    -                    );
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
    deleted file mode 100644
    index 8ba02ebd031f4..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
    +++ /dev/null
    @@ -1,68 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.range;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.time.Instant;
    -import java.time.ZoneOffset;
    -import java.time.ZonedDateTime;
    -
    -public class ParsedDateRange extends ParsedRange {
    -
    -    @Override
    -    public String getType() {
    -        return DateRangeAggregationBuilder.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedDateRange.class.getSimpleName(),
    -        true,
    -        ParsedDateRange::new
    -    );
    -    static {
    -        declareParsedRangeFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedDateRange fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedDateRange aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedRange.ParsedBucket {
    -
    -        @Override
    -        public Object getFrom() {
    -            return doubleAsDateTime(from);
    -        }
    -
    -        @Override
    -        public Object getTo() {
    -            return doubleAsDateTime(to);
    -        }
    -
    -        private static ZonedDateTime doubleAsDateTime(Double d) {
    -            if (d == null || Double.isInfinite(d)) {
    -                return null;
    -            }
    -            return Instant.ofEpochMilli(d.longValue()).atZone(ZoneOffset.UTC);
    -        }
    -
    -        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
    -            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedGeoDistance.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedGeoDistance.java
    deleted file mode 100644
    index 62d63712f3695..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedGeoDistance.java
    +++ /dev/null
    @@ -1,48 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.range;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedGeoDistance extends ParsedRange {
    -
    -    @Override
    -    public String getType() {
    -        return GeoDistanceAggregationBuilder.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedGeoDistance.class.getSimpleName(),
    -        true,
    -        ParsedGeoDistance::new
    -    );
    -    static {
    -        declareParsedRangeFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedGeoDistance fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedGeoDistance aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedRange.ParsedBucket {
    -
    -        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
    -            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedRange.java
    deleted file mode 100644
    index a12c126fb73d8..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedRange.java
    +++ /dev/null
    @@ -1,196 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.range;
    -
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.core.CheckedFunction;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.List;
    -import java.util.function.Supplier;
    -
    -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    -
    -public class ParsedRange extends ParsedMultiBucketAggregation implements Range {
    -
    -    @Override
    -    public String getType() {
    -        return RangeAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    protected static void declareParsedRangeFields(
    -        final ObjectParser objectParser,
    -        final CheckedFunction bucketParser,
    -        final CheckedFunction keyedBucketParser
    -    ) {
    -        declareMultiBucketAggregationFields(objectParser, bucketParser, keyedBucketParser);
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedRange.class.getSimpleName(),
    -        true,
    -        ParsedRange::new
    -    );
    -    static {
    -        declareParsedRangeFields(
    -            PARSER,
    -            parser -> ParsedBucket.fromXContent(parser, false),
    -            parser -> ParsedBucket.fromXContent(parser, true)
    -        );
    -    }
    -
    -    public static ParsedRange fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedRange aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Range.Bucket {
    -
    -        protected String key;
    -        protected double from = Double.NEGATIVE_INFINITY;
    -        protected String fromAsString;
    -        protected double to = Double.POSITIVE_INFINITY;
    -        protected String toAsString;
    -
    -        @Override
    -        public String getKey() {
    -            return getKeyAsString();
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            return key;
    -        }
    -
    -        @Override
    -        public Object getFrom() {
    -            return from;
    -        }
    -
    -        @Override
    -        public String getFromAsString() {
    -            if (fromAsString != null) {
    -                return fromAsString;
    -            }
    -            return doubleAsString(from);
    -        }
    -
    -        @Override
    -        public Object getTo() {
    -            return to;
    -        }
    -
    -        @Override
    -        public String getToAsString() {
    -            if (toAsString != null) {
    -                return toAsString;
    -            }
    -            return doubleAsString(to);
    -        }
    -
    -        @Override
    -        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            if (isKeyed()) {
    -                builder.startObject(key);
    -            } else {
    -                builder.startObject();
    -                builder.field(CommonFields.KEY.getPreferredName(), key);
    -            }
    -            if (Double.isInfinite(from) == false) {
    -                builder.field(CommonFields.FROM.getPreferredName(), from);
    -                if (fromAsString != null) {
    -                    builder.field(CommonFields.FROM_AS_STRING.getPreferredName(), fromAsString);
    -                }
    -            }
    -            if (Double.isInfinite(to) == false) {
    -                builder.field(CommonFields.TO.getPreferredName(), to);
    -                if (toAsString != null) {
    -                    builder.field(CommonFields.TO_AS_STRING.getPreferredName(), toAsString);
    -                }
    -            }
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
    -            getAggregations().toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -
    -        private static String doubleAsString(double d) {
    -            return Double.isInfinite(d) ? null : Double.toString(d);
    -        }
    -
    -        protected static  B parseRangeBucketXContent(
    -            final XContentParser parser,
    -            final Supplier bucketSupplier,
    -            final boolean keyed
    -        ) throws IOException {
    -            final B bucket = bucketSupplier.get();
    -            bucket.setKeyed(keyed);
    -            XContentParser.Token token = parser.currentToken();
    -            String currentFieldName = parser.currentName();
    -            if (keyed) {
    -                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    -                bucket.key = currentFieldName;
    -                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
    -            }
    -
    -            List aggregations = new ArrayList<>();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setKeyAsString(parser.text());
    -                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        bucket.key = parser.text();
    -                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setDocCount(parser.longValue());
    -                    } else if (CommonFields.FROM.getPreferredName().equals(currentFieldName)) {
    -                        bucket.from = parser.doubleValue();
    -                    } else if (CommonFields.FROM_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.fromAsString = parser.text();
    -                    } else if (CommonFields.TO.getPreferredName().equals(currentFieldName)) {
    -                        bucket.to = parser.doubleValue();
    -                    } else if (CommonFields.TO_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.toAsString = parser.text();
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    XContentParserUtils.parseTypedKeysObject(
    -                        parser,
    -                        Aggregation.TYPED_KEYS_DELIMITER,
    -                        Aggregation.class,
    -                        aggregations::add
    -                    );
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -
    -        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
    -            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/ParsedSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/ParsedSampler.java
    deleted file mode 100644
    index 3d29205a91b15..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/ParsedSampler.java
    +++ /dev/null
    @@ -1,25 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.bucket.sampler;
    -
    -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedSampler extends ParsedSingleBucketAggregation implements Sampler {
    -
    -    @Override
    -    public String getType() {
    -        return InternalSampler.PARSER_NAME;
    -    }
    -
    -    public static ParsedSampler fromXContent(XContentParser parser, final String name) throws IOException {
    -        return parseXContent(parser, new ParsedSampler(), name);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java
    index b444a1ef8f4d7..5b72b1396def2 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java
    @@ -85,9 +85,7 @@ public InternalAggregation reduce(List aggregations, Aggreg
             InternalAggregations aggs = InternalAggregations.reduce(subAggregationsList, reduceContext);
             if (reduceContext.isFinalReduce() && aggs != null) {
                 SamplingContext context = buildContext();
    -            aggs = InternalAggregations.from(
    -                aggs.asList().stream().map(agg -> ((InternalAggregation) agg).finalizeSampling(context)).toList()
    -            );
    +            aggs = InternalAggregations.from(aggs.asList().stream().map(agg -> agg.finalizeSampling(context)).toList());
             }
     
             return newAggregation(getName(), docCount, aggs);
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java
    index aed2119dec483..ca3142a0c0797 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java
    @@ -105,7 +105,7 @@ public B reduceBucket(List buckets, AggregationReduceContext context) {
                         docCountError += bucket.getDocCountError();
                     }
                 }
    -            aggregationsList.add((InternalAggregations) bucket.getAggregations());
    +            aggregationsList.add(bucket.getAggregations());
             }
             InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context);
             return createBucket(docCount, aggs, docCountError, buckets.get(0));
    @@ -346,7 +346,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
                     .map(
                         b -> createBucket(
                             samplingContext.scaleUp(b.getDocCount()),
    -                        InternalAggregations.finalizeSampling((InternalAggregations) b.getAggregations(), samplingContext),
    +                        InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext),
                             b.getShowDocCountError() ? samplingContext.scaleUp(b.getDocCountError()) : 0,
                             b
                         )
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java
    index 6cd4c76317106..f3ce541b1b8b9 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java
    @@ -12,7 +12,6 @@
     import org.elasticsearch.common.util.SetBackedScalingCuckooFilter;
     import org.elasticsearch.search.DocValueFormat;
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.BucketOrder;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
    @@ -80,7 +79,7 @@ public long getDocCount() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
    index 07aa318e9c487..d627be186f8ff 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
    @@ -11,7 +11,6 @@
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.search.DocValueFormat;
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.Aggregator;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
    @@ -123,7 +122,7 @@ public long getDocCount() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java
    index 4cd963296ab12..66a3ddb2c94c4 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java
    @@ -10,7 +10,6 @@
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.search.DocValueFormat;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.BucketOrder;
     import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.aggregations.InternalOrder;
    @@ -128,7 +127,7 @@ protected boolean getShowDocCountError() {
             }
     
             @Override
    -        public Aggregations getAggregations() {
    +        public InternalAggregations getAggregations() {
                 return aggregations;
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
    index c57b7e696deb4..21a26507f461b 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
    @@ -245,7 +245,7 @@ static DoubleTerms convertLongTermsToDouble(LongTerms longTerms, DocValueFormat
                     new DoubleTerms.Bucket(
                         bucket.getKeyAsNumber().doubleValue(),
                         bucket.getDocCount(),
    -                    (InternalAggregations) bucket.getAggregations(),
    +                    bucket.getAggregations(),
                         longTerms.showTermDocCountError,
                         longTerms.showTermDocCountError ? bucket.getDocCountError() : 0,
                         decimalFormat
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedDoubleTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedDoubleTerms.java
    deleted file mode 100644
    index d50eb0713e337..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedDoubleTerms.java
    +++ /dev/null
    @@ -1,77 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedDoubleTerms extends ParsedTerms {
    -
    -    @Override
    -    public String getType() {
    -        return DoubleTerms.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedDoubleTerms.class.getSimpleName(),
    -        true,
    -        ParsedDoubleTerms::new
    -    );
    -    static {
    -        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    -    }
    -
    -    public static ParsedDoubleTerms fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedDoubleTerms aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedTerms.ParsedBucket {
    -
    -        private Double key;
    -
    -        @Override
    -        public Object getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return Double.toString(key);
    -            }
    -            return null;
    -        }
    -
    -        public Number getKeyAsNumber() {
    -            return key;
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            builder.field(CommonFields.KEY.getPreferredName(), key);
    -            if (super.getKeyAsString() != null) {
    -                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
    -            }
    -            return builder;
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> bucket.key = p.doubleValue());
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongRareTerms.java
    deleted file mode 100644
    index 616bfb3d5a115..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongRareTerms.java
    +++ /dev/null
    @@ -1,73 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedLongRareTerms extends ParsedRareTerms {
    -    @Override
    -    public String getType() {
    -        return LongRareTerms.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedLongRareTerms.class.getSimpleName(),
    -        true,
    -        ParsedLongRareTerms::new
    -    );
    -
    -    static {
    -        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    -    }
    -
    -    public static ParsedLongRareTerms fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedLongRareTerms aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedRareTerms.ParsedBucket {
    -
    -        private Long key;
    -
    -        @Override
    -        public Object getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return Long.toString(key);
    -            }
    -            return null;
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            builder.field(CommonFields.KEY.getPreferredName(), key);
    -            if (super.getKeyAsString() != null) {
    -                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
    -            }
    -            return builder;
    -        }
    -
    -        static ParsedLongRareTerms.ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseRareTermsBucketXContent(parser, ParsedLongRareTerms.ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue());
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongTerms.java
    deleted file mode 100644
    index 2b7a2c4487c6e..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongTerms.java
    +++ /dev/null
    @@ -1,77 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedLongTerms extends ParsedTerms {
    -
    -    @Override
    -    public String getType() {
    -        return LongTerms.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedLongTerms.class.getSimpleName(),
    -        true,
    -        ParsedLongTerms::new
    -    );
    -    static {
    -        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    -    }
    -
    -    public static ParsedLongTerms fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedLongTerms aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedTerms.ParsedBucket {
    -
    -        private Long key;
    -
    -        @Override
    -        public Object getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return Long.toString(key);
    -            }
    -            return null;
    -        }
    -
    -        public Number getKeyAsNumber() {
    -            return key;
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            builder.field(CommonFields.KEY.getPreferredName(), key);
    -            if (super.getKeyAsString() != null) {
    -                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
    -            }
    -            return builder;
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue());
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedRareTerms.java
    deleted file mode 100644
    index e1e865760d5e7..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedRareTerms.java
    +++ /dev/null
    @@ -1,96 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.elasticsearch.common.CheckedBiConsumer;
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.core.CheckedFunction;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.List;
    -import java.util.function.Supplier;
    -
    -public abstract class ParsedRareTerms extends ParsedMultiBucketAggregation implements RareTerms {
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.startArray(CommonFields.BUCKETS.getPreferredName());
    -        for (RareTerms.Bucket bucket : getBuckets()) {
    -            bucket.toXContent(builder, params);
    -        }
    -        builder.endArray();
    -        return builder;
    -    }
    -
    -    static void declareParsedTermsFields(
    -        final ObjectParser objectParser,
    -        final CheckedFunction bucketParser
    -    ) {
    -        declareMultiBucketAggregationFields(objectParser, bucketParser, bucketParser);
    -    }
    -
    -    public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements RareTerms.Bucket {
    -
    -        @Override
    -        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            builder.startObject();
    -            keyToXContent(builder);
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
    -            getAggregations().toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -
    -        static  B parseRareTermsBucketXContent(
    -            final XContentParser parser,
    -            final Supplier bucketSupplier,
    -            final CheckedBiConsumer keyConsumer
    -        ) throws IOException {
    -
    -            final B bucket = bucketSupplier.get();
    -            final List aggregations = new ArrayList<>();
    -
    -            XContentParser.Token token;
    -            String currentFieldName = parser.currentName();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setKeyAsString(parser.text());
    -                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        keyConsumer.accept(parser, bucket);
    -                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setDocCount(parser.longValue());
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    XContentParserUtils.parseTypedKeysObject(
    -                        parser,
    -                        Aggregation.TYPED_KEYS_DELIMITER,
    -                        Aggregation.class,
    -                        aggregations::add
    -                    );
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantLongTerms.java
    deleted file mode 100644
    index 13cdc88a0082d..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantLongTerms.java
    +++ /dev/null
    @@ -1,68 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedSignificantLongTerms extends ParsedSignificantTerms {
    -
    -    @Override
    -    public String getType() {
    -        return SignificantLongTerms.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedSignificantLongTerms.class.getSimpleName(),
    -        true,
    -        ParsedSignificantLongTerms::new
    -    );
    -    static {
    -        declareParsedSignificantTermsFields(PARSER, ParsedBucket::fromXContent);
    -    }
    -
    -    public static ParsedSignificantLongTerms fromXContent(XContentParser parser, String name) throws IOException {
    -        return parseSignificantTermsXContent(() -> PARSER.parse(parser, null), name);
    -    }
    -
    -    public static class ParsedBucket extends ParsedSignificantTerms.ParsedBucket {
    -
    -        private Long key;
    -
    -        @Override
    -        public Object getKey() {
    -            return key;
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            return Long.toString(key);
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            builder.field(CommonFields.KEY.getPreferredName(), key);
    -            if (super.getKeyAsString() != null) {
    -                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
    -            }
    -            return builder;
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseSignificantTermsBucketXContent(parser, new ParsedBucket(), (p, bucket) -> bucket.key = p.longValue());
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantStringTerms.java
    deleted file mode 100644
    index 28cb9a6fb2a44..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantStringTerms.java
    +++ /dev/null
    @@ -1,73 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.apache.lucene.util.BytesRef;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.nio.CharBuffer;
    -
    -public class ParsedSignificantStringTerms extends ParsedSignificantTerms {
    -
    -    @Override
    -    public String getType() {
    -        return SignificantStringTerms.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedSignificantStringTerms.class.getSimpleName(),
    -        true,
    -        ParsedSignificantStringTerms::new
    -    );
    -    static {
    -        declareParsedSignificantTermsFields(PARSER, ParsedBucket::fromXContent);
    -    }
    -
    -    public static ParsedSignificantStringTerms fromXContent(XContentParser parser, String name) throws IOException {
    -        return parseSignificantTermsXContent(() -> PARSER.parse(parser, null), name);
    -    }
    -
    -    public static class ParsedBucket extends ParsedSignificantTerms.ParsedBucket {
    -
    -        private BytesRef key;
    -
    -        @Override
    -        public Object getKey() {
    -            return getKeyAsString();
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            return key.utf8ToString();
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseSignificantTermsBucketXContent(parser, new ParsedBucket(), (p, bucket) -> {
    -                CharBuffer cb = p.charBufferOrNull();
    -                if (cb == null) {
    -                    bucket.key = null;
    -                } else {
    -                    bucket.key = new BytesRef(cb);
    -                }
    -            });
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantTerms.java
    deleted file mode 100644
    index 873246e58e501..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantTerms.java
    +++ /dev/null
    @@ -1,194 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.elasticsearch.common.CheckedBiConsumer;
    -import org.elasticsearch.common.CheckedSupplier;
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.core.CheckedFunction;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.ParseField;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.Iterator;
    -import java.util.List;
    -import java.util.Map;
    -import java.util.function.Function;
    -import java.util.stream.Collectors;
    -
    -public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregation
    -    implements
    -        SignificantTerms {
    -
    -    private Map bucketMap;
    -    protected long subsetSize;
    -    protected long supersetSize;
    -
    -    protected long getSubsetSize() {
    -        return subsetSize;
    -    }
    -
    -    protected long getSupersetSize() {
    -        return supersetSize;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    @Override
    -    public SignificantTerms.Bucket getBucketByKey(String term) {
    -        if (bucketMap == null) {
    -            bucketMap = buckets.stream().collect(Collectors.toMap(SignificantTerms.Bucket::getKeyAsString, Function.identity()));
    -        }
    -        return bucketMap.get(term);
    -    }
    -
    -    @Override
    -    public Iterator iterator() {
    -        return buckets.stream().map(bucket -> (SignificantTerms.Bucket) bucket).toList().iterator();
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.field(CommonFields.DOC_COUNT.getPreferredName(), subsetSize);
    -        builder.field(InternalMappedSignificantTerms.BG_COUNT, supersetSize);
    -        builder.startArray(CommonFields.BUCKETS.getPreferredName());
    -        for (SignificantTerms.Bucket bucket : buckets) {
    -            bucket.toXContent(builder, params);
    -        }
    -        builder.endArray();
    -        return builder;
    -    }
    -
    -    static  T parseSignificantTermsXContent(
    -        final CheckedSupplier aggregationSupplier,
    -        final String name
    -    ) throws IOException {
    -        T aggregation = aggregationSupplier.get();
    -        aggregation.setName(name);
    -        for (ParsedBucket bucket : aggregation.buckets) {
    -            bucket.subsetSize = aggregation.subsetSize;
    -            bucket.supersetSize = aggregation.supersetSize;
    -        }
    -        return aggregation;
    -    }
    -
    -    static void declareParsedSignificantTermsFields(
    -        final ObjectParser objectParser,
    -        final CheckedFunction bucketParser
    -    ) {
    -        declareMultiBucketAggregationFields(objectParser, bucketParser, bucketParser);
    -        objectParser.declareLong((parsedTerms, value) -> parsedTerms.subsetSize = value, CommonFields.DOC_COUNT);
    -        objectParser.declareLong(
    -            (parsedTerms, value) -> parsedTerms.supersetSize = value,
    -            new ParseField(InternalMappedSignificantTerms.BG_COUNT)
    -        );
    -    }
    -
    -    public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements SignificantTerms.Bucket {
    -
    -        protected long subsetDf;
    -        protected long subsetSize;
    -        protected long supersetDf;
    -        protected long supersetSize;
    -        protected double score;
    -
    -        @Override
    -        public long getDocCount() {
    -            return getSubsetDf();
    -        }
    -
    -        @Override
    -        public long getSubsetDf() {
    -            return subsetDf;
    -        }
    -
    -        @Override
    -        public long getSupersetDf() {
    -            return supersetDf;
    -        }
    -
    -        @Override
    -        public double getSignificanceScore() {
    -            return score;
    -        }
    -
    -        @Override
    -        public long getSupersetSize() {
    -            return supersetSize;
    -        }
    -
    -        @Override
    -        public long getSubsetSize() {
    -            return subsetSize;
    -        }
    -
    -        @Override
    -        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            builder.startObject();
    -            keyToXContent(builder);
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
    -            builder.field(InternalSignificantTerms.SCORE, getSignificanceScore());
    -            builder.field(InternalSignificantTerms.BG_COUNT, getSupersetDf());
    -            getAggregations().toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -
    -        @Override
    -        protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException;
    -
    -        static  B parseSignificantTermsBucketXContent(
    -            final XContentParser parser,
    -            final B bucket,
    -            final CheckedBiConsumer keyConsumer
    -        ) throws IOException {
    -
    -            final List aggregations = new ArrayList<>();
    -            XContentParser.Token token;
    -            String currentFieldName = parser.currentName();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setKeyAsString(parser.text());
    -                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        keyConsumer.accept(parser, bucket);
    -                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        long value = parser.longValue();
    -                        bucket.subsetDf = value;
    -                        bucket.setDocCount(value);
    -                    } else if (InternalSignificantTerms.SCORE.equals(currentFieldName)) {
    -                        bucket.score = parser.doubleValue();
    -                    } else if (InternalSignificantTerms.BG_COUNT.equals(currentFieldName)) {
    -                        bucket.supersetDf = parser.longValue();
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    XContentParserUtils.parseTypedKeysObject(
    -                        parser,
    -                        Aggregation.TYPED_KEYS_DELIMITER,
    -                        Aggregation.class,
    -                        aggregations::add
    -                    );
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringRareTerms.java
    deleted file mode 100644
    index e19d07f5ee22d..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringRareTerms.java
    +++ /dev/null
    @@ -1,78 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.apache.lucene.util.BytesRef;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.nio.CharBuffer;
    -
    -public class ParsedStringRareTerms extends ParsedRareTerms {
    -    @Override
    -    public String getType() {
    -        return StringRareTerms.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedStringRareTerms.class.getSimpleName(),
    -        true,
    -        ParsedStringRareTerms::new
    -    );
    -
    -    static {
    -        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    -    }
    -
    -    public static ParsedStringRareTerms fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedStringRareTerms aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedRareTerms.ParsedBucket {
    -
    -        private BytesRef key;
    -
    -        @Override
    -        public Object getKey() {
    -            return getKeyAsString();
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return key.utf8ToString();
    -            }
    -            return null;
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
    -        }
    -
    -        static ParsedStringRareTerms.ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseRareTermsBucketXContent(parser, ParsedStringRareTerms.ParsedBucket::new, (p, bucket) -> {
    -                CharBuffer cb = p.charBufferOrNull();
    -                if (cb == null) {
    -                    bucket.key = null;
    -                } else {
    -                    bucket.key = new BytesRef(cb);
    -                }
    -            });
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringTerms.java
    deleted file mode 100644
    index d592e59076fed..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringTerms.java
    +++ /dev/null
    @@ -1,85 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.apache.lucene.util.BytesRef;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.nio.CharBuffer;
    -
    -public class ParsedStringTerms extends ParsedTerms {
    -
    -    @Override
    -    public String getType() {
    -        return StringTerms.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedStringTerms.class.getSimpleName(),
    -        true,
    -        ParsedStringTerms::new
    -    );
    -    static {
    -        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    -    }
    -
    -    public static ParsedStringTerms fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedStringTerms aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    public static class ParsedBucket extends ParsedTerms.ParsedBucket {
    -
    -        private BytesRef key;
    -
    -        @Override
    -        public Object getKey() {
    -            return getKeyAsString();
    -        }
    -
    -        @Override
    -        public String getKeyAsString() {
    -            String keyAsString = super.getKeyAsString();
    -            if (keyAsString != null) {
    -                return keyAsString;
    -            }
    -            if (key != null) {
    -                return key.utf8ToString();
    -            }
    -            return null;
    -        }
    -
    -        public Number getKeyAsNumber() {
    -            if (key != null) {
    -                return Double.parseDouble(key.utf8ToString());
    -            }
    -            return null;
    -        }
    -
    -        @Override
    -        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
    -            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
    -        }
    -
    -        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
    -            return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> {
    -                CharBuffer cb = p.charBufferOrNull();
    -                if (cb == null) {
    -                    bucket.key = null;
    -                } else {
    -                    bucket.key = new BytesRef(cb);
    -                }
    -            });
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedTerms.java
    deleted file mode 100644
    index 49a303f97453c..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedTerms.java
    +++ /dev/null
    @@ -1,144 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.bucket.terms;
    -
    -import org.elasticsearch.common.CheckedBiConsumer;
    -import org.elasticsearch.common.xcontent.XContentParserUtils;
    -import org.elasticsearch.core.CheckedFunction;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.Aggregations;
    -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.ArrayList;
    -import java.util.List;
    -import java.util.function.Supplier;
    -
    -import static org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME;
    -import static org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.SUM_OF_OTHER_DOC_COUNTS;
    -
    -public abstract class ParsedTerms extends ParsedMultiBucketAggregation implements Terms {
    -
    -    protected long docCountErrorUpperBound;
    -    protected long sumOtherDocCount;
    -
    -    @Override
    -    public Long getDocCountError() {
    -        return docCountErrorUpperBound;
    -    }
    -
    -    @Override
    -    public long getSumOfOtherDocCounts() {
    -        return sumOtherDocCount;
    -    }
    -
    -    @Override
    -    public List getBuckets() {
    -        return buckets;
    -    }
    -
    -    @Override
    -    public Terms.Bucket getBucketByKey(String term) {
    -        for (Terms.Bucket bucket : getBuckets()) {
    -            if (bucket.getKeyAsString().equals(term)) {
    -                return bucket;
    -            }
    -        }
    -        return null;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError());
    -        builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), getSumOfOtherDocCounts());
    -        builder.startArray(CommonFields.BUCKETS.getPreferredName());
    -        for (Terms.Bucket bucket : getBuckets()) {
    -            bucket.toXContent(builder, params);
    -        }
    -        builder.endArray();
    -        return builder;
    -    }
    -
    -    static void declareParsedTermsFields(
    -        final ObjectParser objectParser,
    -        final CheckedFunction bucketParser
    -    ) {
    -        declareMultiBucketAggregationFields(objectParser, bucketParser, bucketParser);
    -        objectParser.declareLong(
    -            (parsedTerms, value) -> parsedTerms.docCountErrorUpperBound = value,
    -            DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME
    -        );
    -        objectParser.declareLong((parsedTerms, value) -> parsedTerms.sumOtherDocCount = value, SUM_OF_OTHER_DOC_COUNTS);
    -    }
    -
    -    public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Terms.Bucket {
    -
    -        boolean showDocCountError = false;
    -        protected long docCountError;
    -
    -        @Override
    -        public long getDocCountError() {
    -            return docCountError;
    -        }
    -
    -        @Override
    -        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    -            builder.startObject();
    -            keyToXContent(builder);
    -            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
    -            if (showDocCountError) {
    -                builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError());
    -            }
    -            getAggregations().toXContentInternal(builder, params);
    -            builder.endObject();
    -            return builder;
    -        }
    -
    -        static  B parseTermsBucketXContent(
    -            final XContentParser parser,
    -            final Supplier bucketSupplier,
    -            final CheckedBiConsumer keyConsumer
    -        ) throws IOException {
    -
    -            final B bucket = bucketSupplier.get();
    -            final List aggregations = new ArrayList<>();
    -
    -            XContentParser.Token token;
    -            String currentFieldName = parser.currentName();
    -            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                if (token == XContentParser.Token.FIELD_NAME) {
    -                    currentFieldName = parser.currentName();
    -                } else if (token.isValue()) {
    -                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setKeyAsString(parser.text());
    -                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                        keyConsumer.accept(parser, bucket);
    -                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
    -                        bucket.setDocCount(parser.longValue());
    -                    } else if (DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName().equals(currentFieldName)) {
    -                        bucket.docCountError = parser.longValue();
    -                        bucket.showDocCountError = true;
    -                    }
    -                } else if (token == XContentParser.Token.START_OBJECT) {
    -                    XContentParserUtils.parseTypedKeysObject(
    -                        parser,
    -                        Aggregation.TYPED_KEYS_DELIMITER,
    -                        Aggregation.class,
    -                        aggregations::add
    -                    );
    -                }
    -            }
    -            bucket.setAggregations(new Aggregations(aggregations));
    -            return bucket;
    -        }
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
    index f4fb3cef9bc79..d4e5ace459eb4 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
    @@ -21,7 +21,7 @@
     import java.util.Map;
     import java.util.Objects;
     
    -public final class InternalCardinality extends InternalNumericMetricsAggregation.SingleValue implements Cardinality {
    +public class InternalCardinality extends InternalNumericMetricsAggregation.SingleValue implements Cardinality {
         private final AbstractHyperLogLogPlusPlus counts;
     
         InternalCardinality(String name, AbstractHyperLogLogPlusPlus counts, Map metadata) {
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java
    index 77cb482edd8b4..fd637e14581ca 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java
    @@ -52,7 +52,7 @@ public InternalTopHits(
             this.from = from;
             this.size = size;
             this.topDocs = topDocs;
    -        this.searchHits = searchHits;
    +        this.searchHits = searchHits.asUnpooled();
         }
     
         /**
    @@ -63,7 +63,7 @@ public InternalTopHits(StreamInput in) throws IOException {
             from = in.readVInt();
             size = in.readVInt();
             topDocs = Lucene.readTopDocs(in);
    -        searchHits = new SearchHits(in);
    +        searchHits = SearchHits.readFrom(in, false);
         }
     
         @Override
    @@ -152,8 +152,9 @@ private static SearchHits extractSearchHits(
                     position = tracker[shardIndex]++;
                 } while (topDocsForShard.scoreDocs[position] != scoreDoc);
                 hits[i] = ((InternalTopHits) aggregations.get(shardIndex)).searchHits.getAt(position);
    +            assert hits[i].isPooled() == false;
             }
    -        return new SearchHits(hits, reducedTopDocs.totalHits, maxScore);
    +        return SearchHits.unpooled(hits, reducedTopDocs.totalHits, maxScore);
         }
     
         private static float reduceAndFindMaxScore(List aggregations, TopDocs[] shardDocs) {
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java
    index 8f5d3c1b9f322..c3816bef6f0aa 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java
    @@ -64,7 +64,7 @@ public MedianAbsoluteDeviationAggregationBuilder(String name) {
         public MedianAbsoluteDeviationAggregationBuilder(StreamInput in) throws IOException {
             super(in);
             compression = in.readDouble();
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 executionHint = in.readOptionalWriteable(TDigestExecutionHint::readFrom);
             } else {
                 executionHint = TDigestExecutionHint.HIGH_ACCURACY;
    @@ -120,7 +120,7 @@ protected ValuesSourceType defaultValueSourceType() {
         @Override
         protected void innerWriteTo(StreamOutput out) throws IOException {
             out.writeDouble(compression);
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 out.writeOptionalWriteable(executionHint);
             }
         }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedAvg.java
    deleted file mode 100644
    index 346e79ddbcecf..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedAvg.java
    +++ /dev/null
    @@ -1,52 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedAvg extends ParsedSingleValueNumericMetricsAggregation implements Avg {
    -
    -    @Override
    -    public double getValue() {
    -        return value();
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return AvgAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        // InternalAvg renders value only if the avg normalizer (count) is not 0.
    -        // We parse back `null` as Double.POSITIVE_INFINITY so we check for that value here to get the same xContent output
    -        boolean hasValue = value != Double.POSITIVE_INFINITY;
    -        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
    -        if (hasValue && valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(ParsedAvg.class.getSimpleName(), true, ParsedAvg::new);
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY);
    -    }
    -
    -    public static ParsedAvg fromXContent(XContentParser parser, final String name) {
    -        ParsedAvg avg = PARSER.apply(parser, null);
    -        avg.setName(name);
    -        return avg;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java
    deleted file mode 100644
    index 9b8fdf1bde09c..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java
    +++ /dev/null
    @@ -1,64 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedCardinality extends ParsedAggregation implements Cardinality {
    -
    -    private long cardinalityValue;
    -
    -    @Override
    -    public String getValueAsString() {
    -        return Double.toString((double) cardinalityValue);
    -    }
    -
    -    @Override
    -    public double value() {
    -        return getValue();
    -    }
    -
    -    @Override
    -    public long getValue() {
    -        return cardinalityValue;
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return CardinalityAggregationBuilder.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedCardinality.class.getSimpleName(),
    -        true,
    -        ParsedCardinality::new
    -    );
    -
    -    static {
    -        declareAggregationFields(PARSER);
    -        PARSER.declareLong((agg, value) -> agg.cardinalityValue = value, CommonFields.VALUE);
    -    }
    -
    -    public static ParsedCardinality fromXContent(XContentParser parser, final String name) {
    -        ParsedCardinality cardinality = PARSER.apply(parser, null);
    -        cardinality.setName(name);
    -        return cardinality;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.field(CommonFields.VALUE.getPreferredName(), cardinalityValue);
    -        return builder;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java
    deleted file mode 100644
    index 4bf32b5ef5cf1..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java
    +++ /dev/null
    @@ -1,399 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats.Fields;
    -import org.elasticsearch.xcontent.ConstructingObjectParser;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.ObjectParser.ValueType;
    -import org.elasticsearch.xcontent.ParseField;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.Arrays;
    -import java.util.List;
    -
    -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
    -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
    -
    -public class ParsedExtendedStats extends ParsedStats implements ExtendedStats {
    -
    -    protected double sumOfSquares;
    -    protected double variance;
    -    protected double variancePopulation;
    -    protected double varianceSampling;
    -    protected double stdDeviation;
    -    protected double stdDeviationPopulation;
    -    protected double stdDeviationSampling;
    -    protected double stdDeviationBoundUpper;
    -    protected double stdDeviationBoundLower;
    -    protected double stdDeviationBoundUpperPopulation;
    -    protected double stdDeviationBoundLowerPopulation;
    -    protected double stdDeviationBoundUpperSampling;
    -    protected double stdDeviationBoundLowerSampling;
    -
    -    @Override
    -    public String getType() {
    -        return ExtendedStatsAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public double getSumOfSquares() {
    -        return sumOfSquares;
    -    }
    -
    -    @Override
    -    public double getVariance() {
    -        return variance;
    -    }
    -
    -    @Override
    -    public double getVariancePopulation() {
    -        return variancePopulation;
    -    }
    -
    -    @Override
    -    public double getVarianceSampling() {
    -        return varianceSampling;
    -    }
    -
    -    @Override
    -    public double getStdDeviation() {
    -        return stdDeviation;
    -    }
    -
    -    @Override
    -    public double getStdDeviationPopulation() {
    -        return stdDeviationPopulation;
    -    }
    -
    -    @Override
    -    public double getStdDeviationSampling() {
    -        return stdDeviationSampling;
    -    }
    -
    -    private void setStdDeviationBounds(List bounds) {
    -        this.stdDeviationBoundUpper = bounds.get(0);
    -        this.stdDeviationBoundLower = bounds.get(1);
    -        this.stdDeviationBoundUpperPopulation = bounds.get(2) == null ? 0 : bounds.get(2);
    -        this.stdDeviationBoundLowerPopulation = bounds.get(3) == null ? 0 : bounds.get(3);
    -        this.stdDeviationBoundUpperSampling = bounds.get(4) == null ? 0 : bounds.get(4);
    -        this.stdDeviationBoundLowerSampling = bounds.get(5) == null ? 0 : bounds.get(5);
    -    }
    -
    -    @Override
    -    public double getStdDeviationBound(Bounds bound) {
    -        return switch (bound) {
    -            case UPPER -> stdDeviationBoundUpper;
    -            case UPPER_POPULATION -> stdDeviationBoundUpperPopulation;
    -            case UPPER_SAMPLING -> stdDeviationBoundUpperSampling;
    -            case LOWER -> stdDeviationBoundLower;
    -            case LOWER_POPULATION -> stdDeviationBoundLowerPopulation;
    -            case LOWER_SAMPLING -> stdDeviationBoundLowerSampling;
    -        };
    -    }
    -
    -    private void setStdDeviationBoundsAsString(List boundsAsString) {
    -        this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper", boundsAsString.get(0));
    -        this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower", boundsAsString.get(1));
    -        if (boundsAsString.get(2) != null) {
    -            this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_population", boundsAsString.get(2));
    -        }
    -        if (boundsAsString.get(3) != null) {
    -            this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_population", boundsAsString.get(3));
    -        }
    -        if (boundsAsString.get(4) != null) {
    -            this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_sampling", boundsAsString.get(4));
    -        }
    -        if (boundsAsString.get(5) != null) {
    -            this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_sampling", boundsAsString.get(5));
    -        }
    -    }
    -
    -    @Override
    -    public String getStdDeviationAsString() {
    -        return valueAsString.getOrDefault(Fields.STD_DEVIATION_AS_STRING, Double.toString(stdDeviation));
    -    }
    -
    -    @Override
    -    public String getStdDeviationPopulationAsString() {
    -        return valueAsString.getOrDefault(Fields.STD_DEVIATION_POPULATION_AS_STRING, Double.toString(stdDeviationPopulation));
    -    }
    -
    -    @Override
    -    public String getStdDeviationSamplingAsString() {
    -        return valueAsString.getOrDefault(Fields.STD_DEVIATION_SAMPLING_AS_STRING, Double.toString(stdDeviationSampling));
    -    }
    -
    -    @Override
    -    public String getStdDeviationBoundAsString(Bounds bound) {
    -        return switch (bound) {
    -            case UPPER -> valueAsString.getOrDefault(
    -                Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper",
    -                Double.toString(stdDeviationBoundUpper)
    -            );
    -            case UPPER_POPULATION -> valueAsString.getOrDefault(
    -                Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_population",
    -                Double.toString(stdDeviationBoundUpperPopulation)
    -            );
    -            case UPPER_SAMPLING -> valueAsString.getOrDefault(
    -                Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_sampling",
    -                Double.toString(stdDeviationBoundUpperSampling)
    -            );
    -            case LOWER -> valueAsString.getOrDefault(
    -                Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower",
    -                Double.toString(stdDeviationBoundLower)
    -            );
    -            case LOWER_POPULATION -> valueAsString.getOrDefault(
    -                Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_population",
    -                Double.toString(stdDeviationBoundLowerPopulation)
    -            );
    -            case LOWER_SAMPLING -> valueAsString.getOrDefault(
    -                Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_sampling",
    -                Double.toString(stdDeviationBoundLowerSampling)
    -            );
    -        };
    -    }
    -
    -    @Override
    -    public String getSumOfSquaresAsString() {
    -        return valueAsString.getOrDefault(Fields.SUM_OF_SQRS_AS_STRING, Double.toString(sumOfSquares));
    -    }
    -
    -    @Override
    -    public String getVarianceAsString() {
    -        return valueAsString.getOrDefault(Fields.VARIANCE_AS_STRING, Double.toString(variance));
    -    }
    -
    -    @Override
    -    public String getVariancePopulationAsString() {
    -        return valueAsString.getOrDefault(Fields.VARIANCE_POPULATION_AS_STRING, Double.toString(variancePopulation));
    -    }
    -
    -    @Override
    -    public String getVarianceSamplingAsString() {
    -        return valueAsString.getOrDefault(Fields.VARIANCE_SAMPLING_AS_STRING, Double.toString(varianceSampling));
    -    }
    -
    -    @Override
    -    protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params params) throws IOException {
    -        if (count != 0) {
    -            builder.field(Fields.SUM_OF_SQRS, sumOfSquares);
    -            builder.field(Fields.VARIANCE, getVariance());
    -            builder.field(Fields.VARIANCE_POPULATION, getVariancePopulation());
    -            builder.field(Fields.VARIANCE_SAMPLING, getVarianceSampling());
    -            builder.field(Fields.STD_DEVIATION, getStdDeviation());
    -            builder.field(Fields.STD_DEVIATION_POPULATION, getStdDeviationPopulation());
    -            builder.field(Fields.STD_DEVIATION_SAMPLING, getStdDeviationSampling());
    -            builder.startObject(Fields.STD_DEVIATION_BOUNDS);
    -            {
    -                builder.field(Fields.UPPER, getStdDeviationBound(Bounds.UPPER));
    -                builder.field(Fields.LOWER, getStdDeviationBound(Bounds.LOWER));
    -                builder.field(Fields.UPPER_POPULATION, getStdDeviationBound(Bounds.UPPER_POPULATION));
    -                builder.field(Fields.LOWER_POPULATION, getStdDeviationBound(Bounds.LOWER_POPULATION));
    -                builder.field(Fields.UPPER_SAMPLING, getStdDeviationBound(Bounds.UPPER_SAMPLING));
    -                builder.field(Fields.LOWER_SAMPLING, getStdDeviationBound(Bounds.LOWER_SAMPLING));
    -            }
    -            builder.endObject();
    -            if (valueAsString.containsKey(Fields.SUM_OF_SQRS_AS_STRING)) {
    -                builder.field(Fields.SUM_OF_SQRS_AS_STRING, getSumOfSquaresAsString());
    -                builder.field(Fields.VARIANCE_AS_STRING, getVarianceAsString());
    -                builder.field(Fields.VARIANCE_POPULATION_AS_STRING, getVariancePopulationAsString());
    -                builder.field(Fields.VARIANCE_SAMPLING_AS_STRING, getVarianceSamplingAsString());
    -                builder.field(Fields.STD_DEVIATION_AS_STRING, getStdDeviationAsString());
    -                builder.field(Fields.STD_DEVIATION_POPULATION_AS_STRING, getStdDeviationPopulationAsString());
    -                builder.field(Fields.STD_DEVIATION_SAMPLING_AS_STRING, getStdDeviationSamplingAsString());
    -                builder.startObject(Fields.STD_DEVIATION_BOUNDS_AS_STRING);
    -                {
    -                    builder.field(Fields.UPPER, getStdDeviationBoundAsString(Bounds.UPPER));
    -                    builder.field(Fields.LOWER, getStdDeviationBoundAsString(Bounds.LOWER));
    -                    builder.field(Fields.UPPER_POPULATION, getStdDeviationBoundAsString(Bounds.UPPER_POPULATION));
    -                    builder.field(Fields.LOWER_POPULATION, getStdDeviationBoundAsString(Bounds.LOWER_POPULATION));
    -                    builder.field(Fields.UPPER_SAMPLING, getStdDeviationBoundAsString(Bounds.UPPER_SAMPLING));
    -                    builder.field(Fields.LOWER_SAMPLING, getStdDeviationBoundAsString(Bounds.LOWER_SAMPLING));
    -                }
    -                builder.endObject();
    -            }
    -        } else {
    -            builder.nullField(Fields.SUM_OF_SQRS);
    -            builder.nullField(Fields.VARIANCE);
    -            builder.nullField(Fields.VARIANCE_POPULATION);
    -            builder.nullField(Fields.VARIANCE_SAMPLING);
    -            builder.nullField(Fields.STD_DEVIATION);
    -            builder.nullField(Fields.STD_DEVIATION_POPULATION);
    -            builder.nullField(Fields.STD_DEVIATION_SAMPLING);
    -            builder.startObject(Fields.STD_DEVIATION_BOUNDS);
    -            {
    -                builder.nullField(Fields.UPPER);
    -                builder.nullField(Fields.LOWER);
    -                builder.nullField(Fields.UPPER_POPULATION);
    -                builder.nullField(Fields.LOWER_POPULATION);
    -                builder.nullField(Fields.UPPER_SAMPLING);
    -                builder.nullField(Fields.LOWER_SAMPLING);
    -            }
    -            builder.endObject();
    -        }
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedExtendedStats.class.getSimpleName(),
    -        true,
    -        ParsedExtendedStats::new
    -    );
    -
    -    private static final ConstructingObjectParser, Void> STD_BOUNDS_PARSER = new ConstructingObjectParser<>(
    -        ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS",
    -        true,
    -        args -> Arrays.stream(args).map(d -> (Double) d).toList()
    -    );
    -
    -    private static final ConstructingObjectParser, Void> STD_BOUNDS_AS_STRING_PARSER = new ConstructingObjectParser<>(
    -        ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS_AS_STRING",
    -        true,
    -        args -> Arrays.stream(args).map(d -> (String) d).toList()
    -    );
    -
    -    static {
    -        STD_BOUNDS_PARSER.declareField(
    -            constructorArg(),
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.UPPER),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        STD_BOUNDS_PARSER.declareField(
    -            constructorArg(),
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.LOWER),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        STD_BOUNDS_PARSER.declareField(
    -            optionalConstructorArg(),
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.UPPER_POPULATION),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        STD_BOUNDS_PARSER.declareField(
    -            optionalConstructorArg(),
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.LOWER_POPULATION),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        STD_BOUNDS_PARSER.declareField(
    -            optionalConstructorArg(),
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.UPPER_SAMPLING),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        STD_BOUNDS_PARSER.declareField(
    -            optionalConstructorArg(),
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.LOWER_SAMPLING),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        STD_BOUNDS_AS_STRING_PARSER.declareString(constructorArg(), new ParseField(Fields.UPPER));
    -        STD_BOUNDS_AS_STRING_PARSER.declareString(constructorArg(), new ParseField(Fields.LOWER));
    -        STD_BOUNDS_AS_STRING_PARSER.declareString(optionalConstructorArg(), new ParseField(Fields.UPPER_POPULATION));
    -        STD_BOUNDS_AS_STRING_PARSER.declareString(optionalConstructorArg(), new ParseField(Fields.LOWER_POPULATION));
    -        STD_BOUNDS_AS_STRING_PARSER.declareString(optionalConstructorArg(), new ParseField(Fields.UPPER_SAMPLING));
    -        STD_BOUNDS_AS_STRING_PARSER.declareString(optionalConstructorArg(), new ParseField(Fields.LOWER_SAMPLING));
    -        declareExtendedStatsFields(PARSER);
    -    }
    -
    -    protected static void declareExtendedStatsFields(ObjectParser objectParser) {
    -        declareStatsFields(objectParser);
    -        objectParser.declareField(
    -            (agg, value) -> agg.sumOfSquares = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.SUM_OF_SQRS),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.variance = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.VARIANCE),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.variancePopulation = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.VARIANCE_POPULATION),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.varianceSampling = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.VARIANCE_SAMPLING),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.stdDeviation = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.STD_DEVIATION),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.stdDeviationPopulation = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.STD_DEVIATION_POPULATION),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.stdDeviationSampling = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.STD_DEVIATION_SAMPLING),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareObject(
    -            ParsedExtendedStats::setStdDeviationBounds,
    -            STD_BOUNDS_PARSER,
    -            new ParseField(Fields.STD_DEVIATION_BOUNDS)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.SUM_OF_SQRS_AS_STRING, value),
    -            new ParseField(Fields.SUM_OF_SQRS_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.VARIANCE_AS_STRING, value),
    -            new ParseField(Fields.VARIANCE_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.VARIANCE_POPULATION_AS_STRING, value),
    -            new ParseField(Fields.VARIANCE_POPULATION_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.VARIANCE_SAMPLING_AS_STRING, value),
    -            new ParseField(Fields.VARIANCE_SAMPLING_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_AS_STRING, value),
    -            new ParseField(Fields.STD_DEVIATION_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_POPULATION_AS_STRING, value),
    -            new ParseField(Fields.STD_DEVIATION_POPULATION_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_SAMPLING_AS_STRING, value),
    -            new ParseField(Fields.STD_DEVIATION_SAMPLING_AS_STRING)
    -        );
    -        objectParser.declareObject(
    -            ParsedExtendedStats::setStdDeviationBoundsAsString,
    -            STD_BOUNDS_AS_STRING_PARSER,
    -            new ParseField(Fields.STD_DEVIATION_BOUNDS_AS_STRING)
    -        );
    -    }
    -
    -    public static ParsedExtendedStats fromXContent(XContentParser parser, final String name) {
    -        ParsedExtendedStats parsedStats = PARSER.apply(parser, null);
    -        parsedStats.setName(name);
    -        return parsedStats;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java
    deleted file mode 100644
    index 24f68d87802bf..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java
    +++ /dev/null
    @@ -1,98 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.common.geo.GeoBoundingBox;
    -import org.elasticsearch.common.geo.GeoPoint;
    -import org.elasticsearch.core.Nullable;
    -import org.elasticsearch.core.Tuple;
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.xcontent.ConstructingObjectParser;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -import static org.elasticsearch.common.geo.GeoBoundingBox.BOTTOM_RIGHT_FIELD;
    -import static org.elasticsearch.common.geo.GeoBoundingBox.BOUNDS_FIELD;
    -import static org.elasticsearch.common.geo.GeoBoundingBox.LAT_FIELD;
    -import static org.elasticsearch.common.geo.GeoBoundingBox.LON_FIELD;
    -import static org.elasticsearch.common.geo.GeoBoundingBox.TOP_LEFT_FIELD;
    -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
    -
    -public class ParsedGeoBounds extends ParsedAggregation implements GeoBounds {
    -
    -    // A top of Double.NEGATIVE_INFINITY yields an empty xContent, so the bounding box is null
    -    @Nullable
    -    private GeoBoundingBox geoBoundingBox;
    -
    -    @Override
    -    public String getType() {
    -        return GeoBoundsAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        if (geoBoundingBox != null) {
    -            builder.startObject(GeoBoundingBox.BOUNDS_FIELD.getPreferredName());
    -            geoBoundingBox.toXContentFragment(builder);
    -            builder.endObject();
    -        }
    -        return builder;
    -    }
    -
    -    @Override
    -    @Nullable
    -    public GeoPoint topLeft() {
    -        return geoBoundingBox != null ? geoBoundingBox.topLeft() : null;
    -    }
    -
    -    @Override
    -    @Nullable
    -    public GeoPoint bottomRight() {
    -        return geoBoundingBox != null ? geoBoundingBox.bottomRight() : null;
    -    }
    -
    -    private static final ObjectParser<ParsedGeoBounds, Void> PARSER = new ObjectParser<>(
    -        ParsedGeoBounds.class.getSimpleName(),
    -        true,
    -        ParsedGeoBounds::new
    -    );
    -
    -    private static final ConstructingObjectParser<Tuple<GeoPoint, GeoPoint>, Void> BOUNDS_PARSER = new ConstructingObjectParser<>(
    -        ParsedGeoBounds.class.getSimpleName() + "_BOUNDS",
    -        true,
    -        args -> new Tuple<>((GeoPoint) args[0], (GeoPoint) args[1])
    -    );
    -
    -    private static final ObjectParser<GeoPoint, Void> GEO_POINT_PARSER = new ObjectParser<>(
    -        ParsedGeoBounds.class.getSimpleName() + "_POINT",
    -        true,
    -        GeoPoint::new
    -    );
    -
    -    static {
    -        declareAggregationFields(PARSER);
    -        PARSER.declareObject((agg, bbox) -> agg.geoBoundingBox = new GeoBoundingBox(bbox.v1(), bbox.v2()), BOUNDS_PARSER, BOUNDS_FIELD);
    -
    -        BOUNDS_PARSER.declareObject(constructorArg(), GEO_POINT_PARSER, TOP_LEFT_FIELD);
    -        BOUNDS_PARSER.declareObject(constructorArg(), GEO_POINT_PARSER, BOTTOM_RIGHT_FIELD);
    -
    -        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLat, LAT_FIELD);
    -        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLon, LON_FIELD);
    -    }
    -
    -    public static ParsedGeoBounds fromXContent(XContentParser parser, final String name) {
    -        ParsedGeoBounds geoBounds = PARSER.apply(parser, null);
    -        geoBounds.setName(name);
    -        return geoBounds;
    -    }
    -
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java
    deleted file mode 100644
    index 31a44959becd3..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java
    +++ /dev/null
    @@ -1,82 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.common.geo.GeoPoint;
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid.Fields;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -/**
    - * Serialization and merge logic for {@link GeoCentroidAggregator}.
    - */
    -public class ParsedGeoCentroid extends ParsedAggregation implements GeoCentroid {
    -    private GeoPoint centroid;
    -    private long count;
    -
    -    @Override
    -    public GeoPoint centroid() {
    -        return centroid;
    -    }
    -
    -    @Override
    -    public long count() {
    -        return count;
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return GeoCentroidAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        if (centroid != null) {
    -            builder.startObject(InternalCentroid.Fields.CENTROID.getPreferredName());
    -            {
    -                builder.field(Fields.CENTROID_LAT.getPreferredName(), centroid.lat());
    -                builder.field(Fields.CENTROID_LON.getPreferredName(), centroid.lon());
    -            }
    -            builder.endObject();
    -        }
    -        builder.field(InternalCentroid.Fields.COUNT.getPreferredName(), count);
    -        return builder;
    -    }
    -
    -    private static final ObjectParser<ParsedGeoCentroid, Void> PARSER = new ObjectParser<>(
    -        ParsedGeoCentroid.class.getSimpleName(),
    -        true,
    -        ParsedGeoCentroid::new
    -    );
    -
    -    private static final ObjectParser<GeoPoint, Void> GEO_POINT_PARSER = new ObjectParser<>(
    -        ParsedGeoCentroid.class.getSimpleName() + "_POINT",
    -        true,
    -        GeoPoint::new
    -    );
    -
    -    static {
    -        declareAggregationFields(PARSER);
    -        PARSER.declareObject((agg, centroid) -> agg.centroid = centroid, GEO_POINT_PARSER, InternalCentroid.Fields.CENTROID);
    -        PARSER.declareLong((agg, count) -> agg.count = count, InternalCentroid.Fields.COUNT);
    -
    -        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLat, Fields.CENTROID_LAT);
    -        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLon, Fields.CENTROID_LON);
    -    }
    -
    -    public static ParsedGeoCentroid fromXContent(XContentParser parser, final String name) {
    -        ParsedGeoCentroid geoCentroid = PARSER.apply(parser, null);
    -        geoCentroid.setName(name);
    -        return geoCentroid;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java
    deleted file mode 100644
    index 22834ca9b2bdc..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java
    +++ /dev/null
    @@ -1,37 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedHDRPercentileRanks extends ParsedPercentileRanks {
    -
    -    @Override
    -    public String getType() {
    -        return InternalHDRPercentileRanks.NAME;
    -    }
    -
    -    private static final ObjectParser<ParsedHDRPercentileRanks, Void> PARSER = new ObjectParser<>(
    -        ParsedHDRPercentileRanks.class.getSimpleName(),
    -        true,
    -        ParsedHDRPercentileRanks::new
    -    );
    -    static {
    -        ParsedPercentiles.declarePercentilesFields(PARSER);
    -    }
    -
    -    public static ParsedHDRPercentileRanks fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedHDRPercentileRanks aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java
    deleted file mode 100644
    index 2ee56bf648dcc..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java
    +++ /dev/null
    @@ -1,57 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedHDRPercentiles extends ParsedPercentiles implements Percentiles {
    -
    -    @Override
    -    public String getType() {
    -        return InternalHDRPercentiles.NAME;
    -    }
    -
    -    @Override
    -    public double percentile(double percent) {
    -        return getPercentile(percent);
    -    }
    -
    -    @Override
    -    public String percentileAsString(double percent) {
    -        return getPercentileAsString(percent);
    -    }
    -
    -    private static final ObjectParser<ParsedHDRPercentiles, Void> PARSER = new ObjectParser<>(
    -        ParsedHDRPercentiles.class.getSimpleName(),
    -        true,
    -        ParsedHDRPercentiles::new
    -    );
    -    static {
    -        ParsedPercentiles.declarePercentilesFields(PARSER);
    -    }
    -
    -    public static ParsedHDRPercentiles fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedHDRPercentiles aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -
    -    @Override
    -    public double value(String name) {
    -        return percentile(Double.parseDouble(name));
    -    }
    -
    -    @Override
    -    public Iterable<String> valueNames() {
    -        return percentiles.keySet().stream().map(Object::toString).toList();
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMax.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMax.java
    deleted file mode 100644
    index 637ae917fd870..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMax.java
    +++ /dev/null
    @@ -1,44 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedMax extends ParsedSingleValueNumericMetricsAggregation {
    -    @Override
    -    public String getType() {
    -        return MaxAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        boolean hasValue = Double.isInfinite(value) == false;
    -        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
    -        if (hasValue && valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        return builder;
    -    }
    -
    -    private static final ObjectParser<ParsedMax, Void> PARSER = new ObjectParser<>(ParsedMax.class.getSimpleName(), true, ParsedMax::new);
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.NEGATIVE_INFINITY);
    -    }
    -
    -    public static ParsedMax fromXContent(XContentParser parser, final String name) {
    -        ParsedMax max = PARSER.apply(parser, null);
    -        max.setName(name);
    -        return max;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMedianAbsoluteDeviation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMedianAbsoluteDeviation.java
    deleted file mode 100644
    index 6d1794ab57ba2..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMedianAbsoluteDeviation.java
    +++ /dev/null
    @@ -1,54 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedMedianAbsoluteDeviation extends ParsedSingleValueNumericMetricsAggregation implements MedianAbsoluteDeviation {
    -
    -    private static final ObjectParser<ParsedMedianAbsoluteDeviation, Void> PARSER = new ObjectParser<>(
    -        ParsedMedianAbsoluteDeviation.class.getSimpleName(),
    -        true,
    -        ParsedMedianAbsoluteDeviation::new
    -    );
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.NaN);
    -    }
    -
    -    public static ParsedMedianAbsoluteDeviation fromXContent(XContentParser parser, String name) {
    -        ParsedMedianAbsoluteDeviation parsedMedianAbsoluteDeviation = PARSER.apply(parser, null);
    -        parsedMedianAbsoluteDeviation.setName(name);
    -        return parsedMedianAbsoluteDeviation;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        final boolean hasValue = Double.isFinite(getMedianAbsoluteDeviation());
    -        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? getMedianAbsoluteDeviation() : null);
    -        if (hasValue && valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        return builder;
    -    }
    -
    -    @Override
    -    public double getMedianAbsoluteDeviation() {
    -        return value();
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return MedianAbsoluteDeviationAggregationBuilder.NAME;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMin.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMin.java
    deleted file mode 100644
    index 7a6ca949081cb..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMin.java
    +++ /dev/null
    @@ -1,44 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedMin extends ParsedSingleValueNumericMetricsAggregation {
    -    @Override
    -    public String getType() {
    -        return MinAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        boolean hasValue = Double.isInfinite(value) == false;
    -        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
    -        if (hasValue && valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        return builder;
    -    }
    -
    -    private static final ObjectParser<ParsedMin, Void> PARSER = new ObjectParser<>(ParsedMin.class.getSimpleName(), true, ParsedMin::new);
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY);
    -    }
    -
    -    public static ParsedMin fromXContent(XContentParser parser, final String name) {
    -        ParsedMin min = PARSER.apply(parser, null);
    -        min.setName(name);
    -        return min;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentileRanks.java
    deleted file mode 100644
    index 44ecf5cf69b4c..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentileRanks.java
    +++ /dev/null
    @@ -1,41 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.common.collect.Iterators;
    -
    -import java.util.Iterator;
    -
    -abstract class ParsedPercentileRanks extends ParsedPercentiles implements PercentileRanks {
    -
    -    @Override
    -    public double percent(double value) {
    -        return getPercentile(value);
    -    }
    -
    -    @Override
    -    public String percentAsString(double value) {
    -        return getPercentileAsString(value);
    -    }
    -
    -    @Override
    -    public double value(String name) {
    -        return percent(Double.parseDouble(name));
    -    }
    -
    -    @Override
    -    public Iterable<String> valueNames() {
    -        return percentiles.keySet().stream().map(Object::toString).toList();
    -    }
    -
    -    @Override
    -    public Iterator<Percentile> iterator() {
    -        return Iterators.map(super.iterator(), percentile -> new Percentile(percentile.value(), percentile.percent()));
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java
    deleted file mode 100644
    index 3af30aa16f094..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java
    +++ /dev/null
    @@ -1,162 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.common.collect.Iterators;
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.HashMap;
    -import java.util.Iterator;
    -import java.util.LinkedHashMap;
    -import java.util.Map;
    -
    -public abstract class ParsedPercentiles extends ParsedAggregation implements Iterable<Percentile> {
    -
    -    protected final Map<Double, Double> percentiles = new LinkedHashMap<>();
    -    protected final Map<Double, String> percentilesAsString = new HashMap<>();
    -
    -    private boolean keyed;
    -
    -    void addPercentile(Double key, Double value) {
    -        percentiles.put(key, value);
    -    }
    -
    -    void addPercentileAsString(Double key, String valueAsString) {
    -        percentilesAsString.put(key, valueAsString);
    -    }
    -
    -    protected Double getPercentile(double percent) {
    -        if (percentiles.isEmpty()) {
    -            return Double.NaN;
    -        }
    -        return percentiles.get(percent);
    -    }
    -
    -    protected String getPercentileAsString(double percent) {
    -        String valueAsString = percentilesAsString.get(percent);
    -        if (valueAsString != null) {
    -            return valueAsString;
    -        }
    -        Double value = getPercentile(percent);
    -        if (value != null) {
    -            return Double.toString(value);
    -        }
    -        return null;
    -    }
    -
    -    void setKeyed(boolean keyed) {
    -        this.keyed = keyed;
    -    }
    -
    -    @Override
    -    public Iterator<Percentile> iterator() {
    -        return Iterators.map(percentiles.entrySet().iterator(), next -> new Percentile(next.getKey(), next.getValue()));
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        final boolean valuesAsString = (percentilesAsString.isEmpty() == false);
    -        if (keyed) {
    -            builder.startObject(CommonFields.VALUES.getPreferredName());
    -            for (Map.Entry<Double, Double> percentile : percentiles.entrySet()) {
    -                Double key = percentile.getKey();
    -                Double value = percentile.getValue();
    -                builder.field(String.valueOf(key), value.isNaN() ? null : value);
    -                if (valuesAsString && value.isNaN() == false) {
    -                    builder.field(key + "_as_string", getPercentileAsString(key));
    -                }
    -            }
    -            builder.endObject();
    -        } else {
    -            builder.startArray(CommonFields.VALUES.getPreferredName());
    -            for (Map.Entry<Double, Double> percentile : percentiles.entrySet()) {
    -                Double key = percentile.getKey();
    -                builder.startObject();
    -                {
    -                    builder.field(CommonFields.KEY.getPreferredName(), key);
    -                    Double value = percentile.getValue();
    -                    builder.field(CommonFields.VALUE.getPreferredName(), value.isNaN() ? null : value);
    -                    if (valuesAsString && value.isNaN() == false) {
    -                        builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), getPercentileAsString(key));
    -                    }
    -                }
    -                builder.endObject();
    -            }
    -            builder.endArray();
    -        }
    -        return builder;
    -    }
    -
    -    protected static void declarePercentilesFields(ObjectParser<? extends ParsedPercentiles, Void> objectParser) {
    -        ParsedAggregation.declareAggregationFields(objectParser);
    -
    -        objectParser.declareField((parser, aggregation, context) -> {
    -            XContentParser.Token token = parser.currentToken();
    -            if (token == XContentParser.Token.START_OBJECT) {
    -                aggregation.setKeyed(true);
    -                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                    if (token.isValue()) {
    -                        if (token == XContentParser.Token.VALUE_NUMBER) {
    -                            aggregation.addPercentile(Double.valueOf(parser.currentName()), parser.doubleValue());
    -                        } else if (token == XContentParser.Token.VALUE_STRING) {
    -                            int i = parser.currentName().indexOf("_as_string");
    -                            if (i > 0) {
    -                                double key = Double.parseDouble(parser.currentName().substring(0, i));
    -                                aggregation.addPercentileAsString(key, parser.text());
    -                            } else {
    -                                aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.valueOf(parser.text()));
    -                            }
    -                        }
    -                    } else if (token == XContentParser.Token.VALUE_NULL) {
    -                        aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.NaN);
    -                    } else {
    -                        parser.skipChildren(); // skip potential inner objects and arrays for forward compatibility
    -                    }
    -                }
    -            } else if (token == XContentParser.Token.START_ARRAY) {
    -                aggregation.setKeyed(false);
    -
    -                String currentFieldName = null;
    -                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
    -                    Double key = null;
    -                    Double value = null;
    -                    String valueAsString = null;
    -
    -                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    -                        if (token == XContentParser.Token.FIELD_NAME) {
    -                            currentFieldName = parser.currentName();
    -                        } else if (token.isValue()) {
    -                            if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
    -                                key = parser.doubleValue();
    -                            } else if (CommonFields.VALUE.getPreferredName().equals(currentFieldName)) {
    -                                value = parser.doubleValue();
    -                            } else if (CommonFields.VALUE_AS_STRING.getPreferredName().equals(currentFieldName)) {
    -                                valueAsString = parser.text();
    -                            }
    -                        } else if (token == XContentParser.Token.VALUE_NULL) {
    -                            value = Double.NaN;
    -                        } else {
    -                            parser.skipChildren(); // skip potential inner objects and arrays for forward compatibility
    -                        }
    -                    }
    -                    if (key != null) {
    -                        aggregation.addPercentile(key, value);
    -                        if (valueAsString != null) {
    -                            aggregation.addPercentileAsString(key, valueAsString);
    -                        }
    -                    }
    -                }
    -            }
    -        }, CommonFields.VALUES, ObjectParser.ValueType.OBJECT_ARRAY);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java
    deleted file mode 100644
    index 4d9946ed9f98f..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java
    +++ /dev/null
    @@ -1,88 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.common.bytes.BytesArray;
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.ObjectParser.ValueType;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -import org.elasticsearch.xcontent.XContentParser.Token;
    -
    -import java.io.IOException;
    -import java.util.Collections;
    -import java.util.List;
    -
    -public class ParsedScriptedMetric extends ParsedAggregation implements ScriptedMetric {
    -    private List<Object> aggregation;
    -
    -    @Override
    -    public String getType() {
    -        return ScriptedMetricAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public Object aggregation() {
    -        assert aggregation.size() == 1; // see InternalScriptedMetric#aggregations() for why we can assume this
    -        return aggregation.get(0);
    -    }
    -
    -    @Override
    -    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        return builder.field(CommonFields.VALUE.getPreferredName(), aggregation());
    -    }
    -
    -    private static final ObjectParser<ParsedScriptedMetric, Void> PARSER = new ObjectParser<>(
    -        ParsedScriptedMetric.class.getSimpleName(),
    -        true,
    -        ParsedScriptedMetric::new
    -    );
    -
    -    static {
    -        declareAggregationFields(PARSER);
    -        PARSER.declareField(
    -            (agg, value) -> agg.aggregation = Collections.singletonList(value),
    -            ParsedScriptedMetric::parseValue,
    -            CommonFields.VALUE,
    -            ValueType.VALUE_OBJECT_ARRAY
    -        );
    -    }
    -
    -    private static Object parseValue(XContentParser parser) throws IOException {
    -        Token token = parser.currentToken();
    -        Object value = null;
    -        if (token == XContentParser.Token.VALUE_NULL) {
    -            value = null;
    -        } else if (token.isValue()) {
    -            if (token == XContentParser.Token.VALUE_STRING) {
    -                // binary values will be parsed back and returned as base64 strings when reading from json and yaml
    -                value = parser.text();
    -            } else if (token == XContentParser.Token.VALUE_NUMBER) {
    -                value = parser.numberValue();
    -            } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
    -                value = parser.booleanValue();
    -            } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
    -                // binary values will be parsed back and returned as BytesArray when reading from cbor and smile
    -                value = new BytesArray(parser.binaryValue());
    -            }
    -        } else if (token == XContentParser.Token.START_OBJECT) {
    -            value = parser.map();
    -        } else if (token == XContentParser.Token.START_ARRAY) {
    -            value = parser.list();
    -        }
    -        return value;
    -    }
    -
    -    public static ParsedScriptedMetric fromXContent(XContentParser parser, final String name) {
    -        ParsedScriptedMetric aggregation = PARSER.apply(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSingleValueNumericMetricsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSingleValueNumericMetricsAggregation.java
    deleted file mode 100644
    index ddb8b9f6f97af..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSingleValueNumericMetricsAggregation.java
    +++ /dev/null
    @@ -1,56 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.ObjectParser.ValueType;
    -
    -public abstract class ParsedSingleValueNumericMetricsAggregation extends ParsedAggregation
    -    implements
    -        NumericMetricsAggregation.SingleValue {
    -
    -    protected double value;
    -    protected String valueAsString;
    -
    -    @Override
    -    public String getValueAsString() {
    -        if (valueAsString != null) {
    -            return valueAsString;
    -        } else {
    -            return Double.toString(value);
    -        }
    -    }
    -
    -    @Override
    -    public double value() {
    -        return value;
    -    }
    -
    -    protected void setValue(double value) {
    -        this.value = value;
    -    }
    -
    -    protected void setValueAsString(String valueAsString) {
    -        this.valueAsString = valueAsString;
    -    }
    -
    -    protected static void declareSingleValueFields(
    -        ObjectParser objectParser,
    -        double defaultNullValue
    -    ) {
    -        declareAggregationFields(objectParser);
    -        objectParser.declareField(
    -            ParsedSingleValueNumericMetricsAggregation::setValue,
    -            (parser, context) -> parseDouble(parser, defaultNullValue),
    -            CommonFields.VALUE,
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareString(ParsedSingleValueNumericMetricsAggregation::setValueAsString, CommonFields.VALUE_AS_STRING);
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java
    deleted file mode 100644
    index c6621844c8b88..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java
    +++ /dev/null
    @@ -1,191 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.search.aggregations.metrics.InternalStats.Fields;
    -import org.elasticsearch.search.aggregations.metrics.InternalStats.Metrics;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.ObjectParser.ValueType;
    -import org.elasticsearch.xcontent.ParseField;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.HashMap;
    -import java.util.Map;
    -
    -import static org.elasticsearch.search.aggregations.metrics.InternalStats.METRIC_NAMES;
    -
    -public class ParsedStats extends ParsedAggregation implements Stats {
    -
    -    protected long count;
    -    protected double min;
    -    protected double max;
    -    protected double sum;
    -    protected double avg;
    -
    -    protected final Map valueAsString = new HashMap<>();
    -
    -    @Override
    -    public long getCount() {
    -        return count;
    -    }
    -
    -    @Override
    -    public double getMin() {
    -        return min;
    -    }
    -
    -    @Override
    -    public double getMax() {
    -        return max;
    -    }
    -
    -    @Override
    -    public double getAvg() {
    -        return avg;
    -    }
    -
    -    @Override
    -    public double getSum() {
    -        return sum;
    -    }
    -
    -    @Override
    -    public String getMinAsString() {
    -        return valueAsString.getOrDefault(Fields.MIN_AS_STRING, Double.toString(min));
    -    }
    -
    -    @Override
    -    public String getMaxAsString() {
    -        return valueAsString.getOrDefault(Fields.MAX_AS_STRING, Double.toString(max));
    -    }
    -
    -    @Override
    -    public String getAvgAsString() {
    -        return valueAsString.getOrDefault(Fields.AVG_AS_STRING, Double.toString(avg));
    -    }
    -
    -    @Override
    -    public String getSumAsString() {
    -        return valueAsString.getOrDefault(Fields.SUM_AS_STRING, Double.toString(sum));
    -    }
    -
    -    @Override
    -    public double value(String name) {
    -        Metrics metrics = Metrics.valueOf(name);
    -        return switch (metrics) {
    -            case min -> min;
    -            case max -> max;
    -            case avg -> avg;
    -            case count -> count;
    -            case sum -> sum;
    -        };
    -    }
    -
    -    @Override
    -    public Iterable valueNames() {
    -        return METRIC_NAMES;
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return StatsAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.field(Fields.COUNT, count);
    -        if (count != 0) {
    -            builder.field(Fields.MIN, min);
    -            builder.field(Fields.MAX, max);
    -            builder.field(Fields.AVG, avg);
    -            builder.field(Fields.SUM, sum);
    -            if (valueAsString.get(Fields.MIN_AS_STRING) != null) {
    -                builder.field(Fields.MIN_AS_STRING, getMinAsString());
    -                builder.field(Fields.MAX_AS_STRING, getMaxAsString());
    -                builder.field(Fields.AVG_AS_STRING, getAvgAsString());
    -                builder.field(Fields.SUM_AS_STRING, getSumAsString());
    -            }
    -        } else {
    -            builder.nullField(Fields.MIN);
    -            builder.nullField(Fields.MAX);
    -            builder.nullField(Fields.AVG);
    -            builder.field(Fields.SUM, 0.0d);
    -        }
    -        otherStatsToXContent(builder, params);
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedStats.class.getSimpleName(),
    -        true,
    -        ParsedStats::new
    -    );
    -
    -    static {
    -        declareStatsFields(PARSER);
    -    }
    -
    -    protected static void declareStatsFields(ObjectParser objectParser) {
    -        declareAggregationFields(objectParser);
    -        objectParser.declareLong((agg, value) -> agg.count = value, new ParseField(Fields.COUNT));
    -        objectParser.declareField(
    -            (agg, value) -> agg.min = value,
    -            (parser, context) -> parseDouble(parser, Double.POSITIVE_INFINITY),
    -            new ParseField(Fields.MIN),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.max = value,
    -            (parser, context) -> parseDouble(parser, Double.NEGATIVE_INFINITY),
    -            new ParseField(Fields.MAX),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.avg = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.AVG),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareField(
    -            (agg, value) -> agg.sum = value,
    -            (parser, context) -> parseDouble(parser, 0),
    -            new ParseField(Fields.SUM),
    -            ValueType.DOUBLE_OR_NULL
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.MIN_AS_STRING, value),
    -            new ParseField(Fields.MIN_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.MAX_AS_STRING, value),
    -            new ParseField(Fields.MAX_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.AVG_AS_STRING, value),
    -            new ParseField(Fields.AVG_AS_STRING)
    -        );
    -        objectParser.declareString(
    -            (agg, value) -> agg.valueAsString.put(Fields.SUM_AS_STRING, value),
    -            new ParseField(Fields.SUM_AS_STRING)
    -        );
    -    }
    -
    -    public static ParsedStats fromXContent(XContentParser parser, final String name) {
    -        ParsedStats parsedStats = PARSER.apply(parser, null);
    -        parsedStats.setName(name);
    -        return parsedStats;
    -    }
    -
    -    protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params params) throws IOException {
    -        return builder;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSum.java
    deleted file mode 100644
    index 93af840b80849..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSum.java
    +++ /dev/null
    @@ -1,43 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedSum extends ParsedSingleValueNumericMetricsAggregation {
    -    @Override
    -    public String getType() {
    -        return SumAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.field(CommonFields.VALUE.getPreferredName(), value);
    -        if (valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(ParsedSum.class.getSimpleName(), true, ParsedSum::new);
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.NEGATIVE_INFINITY);
    -    }
    -
    -    public static ParsedSum fromXContent(XContentParser parser, final String name) {
    -        ParsedSum sum = PARSER.apply(parser, null);
    -        sum.setName(name);
    -        return sum;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java
    deleted file mode 100644
    index 29858a430de89..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java
    +++ /dev/null
    @@ -1,37 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedTDigestPercentileRanks extends ParsedPercentileRanks {
    -
    -    @Override
    -    public String getType() {
    -        return InternalTDigestPercentileRanks.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedTDigestPercentileRanks.class.getSimpleName(),
    -        true,
    -        ParsedTDigestPercentileRanks::new
    -    );
    -    static {
    -        ParsedPercentiles.declarePercentilesFields(PARSER);
    -    }
    -
    -    public static ParsedTDigestPercentileRanks fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedTDigestPercentileRanks aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java
    deleted file mode 100644
    index b5ab17ba335c3..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java
    +++ /dev/null
    @@ -1,57 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedTDigestPercentiles extends ParsedPercentiles implements Percentiles {
    -
    -    @Override
    -    public String getType() {
    -        return InternalTDigestPercentiles.NAME;
    -    }
    -
    -    @Override
    -    public double percentile(double percent) {
    -        return getPercentile(percent);
    -    }
    -
    -    @Override
    -    public String percentileAsString(double percent) {
    -        return getPercentileAsString(percent);
    -    }
    -
    -    @Override
    -    public double value(String name) {
    -        return percentile(Double.parseDouble(name));
    -    }
    -
    -    @Override
    -    public Iterable valueNames() {
    -        return percentiles.keySet().stream().map(Object::toString).toList();
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedTDigestPercentiles.class.getSimpleName(),
    -        true,
    -        ParsedTDigestPercentiles::new
    -    );
    -    static {
    -        ParsedPercentiles.declarePercentilesFields(PARSER);
    -    }
    -
    -    public static ParsedTDigestPercentiles fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedTDigestPercentiles aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java
    deleted file mode 100644
    index 39f96efcef81c..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java
    +++ /dev/null
    @@ -1,66 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedValueCount extends ParsedAggregation implements ValueCount {
    -
    -    private long valueCount;
    -
    -    @Override
    -    public double value() {
    -        return getValue();
    -    }
    -
    -    @Override
    -    public long getValue() {
    -        return valueCount;
    -    }
    -
    -    @Override
    -    public String getValueAsString() {
    -        // InternalValueCount doesn't print "value_as_string", but you can get a formatted value using
    -        // getValueAsString() using the raw formatter and converting the value to double
    -        return Double.toString(valueCount);
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return ValueCountAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.field(CommonFields.VALUE.getPreferredName(), valueCount);
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedValueCount.class.getSimpleName(),
    -        true,
    -        ParsedValueCount::new
    -    );
    -
    -    static {
    -        declareAggregationFields(PARSER);
    -        PARSER.declareLong((agg, value) -> agg.valueCount = value, CommonFields.VALUE);
    -    }
    -
    -    public static ParsedValueCount fromXContent(XContentParser parser, final String name) {
    -        ParsedValueCount sum = PARSER.apply(parser, null);
    -        sum.setName(name);
    -        return sum;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java
    deleted file mode 100644
    index 090c7b15ff195..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java
    +++ /dev/null
    @@ -1,56 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.metrics;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedWeightedAvg extends ParsedSingleValueNumericMetricsAggregation implements WeightedAvg {
    -
    -    @Override
    -    public double getValue() {
    -        return value();
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return WeightedAvgAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        // InternalWeightedAvg renders value only if the avg normalizer (count) is not 0.
    -        // We parse back `null` as Double.POSITIVE_INFINITY so we check for that value here to get the same xContent output
    -        boolean hasValue = value != Double.POSITIVE_INFINITY;
    -        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
    -        if (hasValue && valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedWeightedAvg.class.getSimpleName(),
    -        true,
    -        ParsedWeightedAvg::new
    -    );
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY);
    -    }
    -
    -    public static ParsedWeightedAvg fromXContent(XContentParser parser, final String name) {
    -        ParsedWeightedAvg avg = PARSER.apply(parser, null);
    -        avg.setName(name);
    -        return avg;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java
    index d946ce3e14ea1..fedae36be0263 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java
    @@ -130,7 +130,7 @@ public TDigest(double compression, TDigestExecutionHint executionHint) {
             TDigest(StreamInput in) throws IOException {
                 this(
                     in.readDouble(),
    -                in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)
    +                in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)
                         ? in.readOptionalWriteable(TDigestExecutionHint::readFrom)
                         : TDigestExecutionHint.HIGH_ACCURACY
                 );
    @@ -235,7 +235,7 @@ public InternalNumericMetricsAggregation.MultiValue createEmptyPercentileRanksAg
             public void writeTo(StreamOutput out) throws IOException {
                 super.writeTo(out);
                 out.writeDouble(compression);
    -            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                     out.writeOptionalWriteable(executionHint);
                 }
             }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
    index 23c26794f6bb5..0d0ed21556a92 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
    @@ -107,7 +107,7 @@ public final double compression() {
     
         public static void write(TDigestState state, StreamOutput out) throws IOException {
             out.writeDouble(state.compression);
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 out.writeString(state.type.toString());
                 out.writeVLong(state.tdigest.size());
             }
    @@ -123,7 +123,7 @@ public static TDigestState read(StreamInput in) throws IOException {
             double compression = in.readDouble();
             TDigestState state;
             long size = 0;
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 state = new TDigestState(Type.valueOf(in.readString()), compression);
                 size = in.readVLong();
             } else {
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java
    index 5661edce6eb89..59317944930ec 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java
    @@ -12,8 +12,8 @@
     import org.elasticsearch.search.aggregations.Aggregation;
     import org.elasticsearch.search.aggregations.AggregationExecutionException;
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.InternalAggregation;
    +import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
     import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
     import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
    @@ -46,7 +46,7 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg
         }
     
         @Override
    -    public final InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context) {
    +    public final InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context) {
             preCollection();
             List parsedPath = AggregationPath.parse(bucketsPaths()[0]).getPathElements();
             for (Aggregation aggregation : aggregations) {
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java
    index c5e52448223c0..7f18b87adce3e 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java
    @@ -80,7 +80,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe
                         newBuckets.add(bucket);
                     } else {
                         final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false)
    -                        .map((p) -> (InternalAggregation) p)
                             .collect(Collectors.toCollection(ArrayList::new));
     
                         InternalSimpleValue simpleValue = new InternalSimpleValue(name(), returned.doubleValue(), formatter, metadata());
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java
    index c51c60bf24ee5..2e2c46ac0b38a 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java
    @@ -54,7 +54,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe
                 }
     
                 List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false)
    -                .map((p) -> (InternalAggregation) p)
                     .collect(Collectors.toCollection(ArrayList::new));
                 aggs.add(new InternalSimpleValue(name(), sum, formatter, metadata()));
                 Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java
    deleted file mode 100644
    index 03fe874b0b487..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java
    +++ /dev/null
    @@ -1,65 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.pipeline;
    -
    -import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.Collections;
    -import java.util.List;
    -
    -public class ParsedBucketMetricValue extends ParsedSingleValueNumericMetricsAggregation implements BucketMetricValue {
    -
    -    private List keys = Collections.emptyList();
    -
    -    @Override
    -    public String[] keys() {
    -        return this.keys.toArray(new String[keys.size()]);
    -    }
    -
    -    @Override
    -    public String getType() {
    -        return InternalBucketMetricValue.NAME;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        boolean hasValue = Double.isInfinite(value) == false;
    -        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
    -        if (hasValue && valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        builder.startArray(InternalBucketMetricValue.KEYS_FIELD.getPreferredName());
    -        for (String key : keys) {
    -            builder.value(key);
    -        }
    -        builder.endArray();
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedBucketMetricValue.class.getSimpleName(),
    -        true,
    -        ParsedBucketMetricValue::new
    -    );
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.NEGATIVE_INFINITY);
    -        PARSER.declareStringArray((agg, value) -> agg.keys = value, InternalBucketMetricValue.KEYS_FIELD);
    -    }
    -
    -    public static ParsedBucketMetricValue fromXContent(XContentParser parser, final String name) {
    -        ParsedBucketMetricValue bucketMetricValue = PARSER.apply(parser, null);
    -        bucketMetricValue.setName(name);
    -        return bucketMetricValue;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java
    deleted file mode 100644
    index e7751230334d5..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java
    +++ /dev/null
    @@ -1,65 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.pipeline;
    -
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.ObjectParser.ValueType;
    -import org.elasticsearch.xcontent.ParseField;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedDerivative extends ParsedSimpleValue {
    -
    -    private double normalizedValue;
    -    private String normalizedAsString;
    -    private boolean hasNormalizationFactor;
    -    private static final ParseField NORMALIZED_AS_STRING = new ParseField("normalized_value_as_string");
    -    private static final ParseField NORMALIZED = new ParseField("normalized_value");
    -
    -    @Override
    -    public String getType() {
    -        return "derivative";
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedDerivative.class.getSimpleName(),
    -        true,
    -        ParsedDerivative::new
    -    );
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.NaN);
    -        PARSER.declareField((agg, normalized) -> {
    -            agg.normalizedValue = normalized;
    -            agg.hasNormalizationFactor = true;
    -        }, (parser, context) -> parseDouble(parser, Double.NaN), NORMALIZED, ValueType.DOUBLE_OR_NULL);
    -        PARSER.declareString((agg, normalAsString) -> agg.normalizedAsString = normalAsString, NORMALIZED_AS_STRING);
    -    }
    -
    -    public static ParsedDerivative fromXContent(XContentParser parser, final String name) {
    -        ParsedDerivative derivative = PARSER.apply(parser, null);
    -        derivative.setName(name);
    -        return derivative;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        super.doXContentBody(builder, params);
    -        if (hasNormalizationFactor) {
    -            boolean hasValue = Double.isNaN(normalizedValue) == false;
    -            builder.field(NORMALIZED.getPreferredName(), hasValue ? normalizedValue : null);
    -            if (hasValue && normalizedAsString != null) {
    -                builder.field(NORMALIZED_AS_STRING.getPreferredName(), normalizedAsString);
    -            }
    -        }
    -        return builder;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java
    deleted file mode 100644
    index 33725e7bc3199..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java
    +++ /dev/null
    @@ -1,37 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.pipeline;
    -
    -import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -public class ParsedExtendedStatsBucket extends ParsedExtendedStats implements ExtendedStatsBucket {
    -
    -    @Override
    -    public String getType() {
    -        return ExtendedStatsBucketPipelineAggregationBuilder.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedExtendedStatsBucket.class.getSimpleName(),
    -        true,
    -        ParsedExtendedStatsBucket::new
    -    );
    -
    -    static {
    -        declareExtendedStatsFields(PARSER);
    -    }
    -
    -    public static ParsedExtendedStatsBucket fromXContent(XContentParser parser, final String name) {
    -        ParsedExtendedStatsBucket parsedStatsBucket = PARSER.apply(parser, null);
    -        parsedStatsBucket.setName(name);
    -        return parsedStatsBucket;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java
    deleted file mode 100644
    index 7da76d2d4c2eb..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java
    +++ /dev/null
    @@ -1,95 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.pipeline;
    -
    -import org.elasticsearch.search.aggregations.metrics.ParsedPercentiles;
    -import org.elasticsearch.search.aggregations.metrics.Percentiles;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -import java.util.Map.Entry;
    -
    -public class ParsedPercentilesBucket extends ParsedPercentiles implements Percentiles {
    -
    -    @Override
    -    public String getType() {
    -        return PercentilesBucketPipelineAggregationBuilder.NAME;
    -    }
    -
    -    @Override
    -    public double percentile(double percent) throws IllegalArgumentException {
    -        Double value = percentiles.get(percent);
    -        if (value == null) {
    -            throw new IllegalArgumentException(
    -                "Percent requested ["
    -                    + String.valueOf(percent)
    -                    + "] was not"
    -                    + " one of the computed percentiles. Available keys are: "
    -                    + percentiles.keySet()
    -            );
    -        }
    -        return value;
    -    }
    -
    -    @Override
    -    public String percentileAsString(double percent) {
    -        double value = percentile(percent); // check availability as unformatted value
    -        String valueAsString = percentilesAsString.get(percent);
    -        if (valueAsString != null) {
    -            return valueAsString;
    -        } else {
    -            return Double.toString(value);
    -        }
    -    }
    -
    -    @Override
    -    public double value(String name) {
    -        return percentile(Double.parseDouble(name));
    -    }
    -
    -    @Override
    -    public Iterable valueNames() {
    -        return percentiles.keySet().stream().map(Object::toString).toList();
    -    }
    -
    -    @Override
    -    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        builder.startObject("values");
    -        for (Entry percent : percentiles.entrySet()) {
    -            double value = percent.getValue();
    -            boolean hasValue = Double.isNaN(value) == false;
    -            Double key = percent.getKey();
    -            builder.field(Double.toString(key), hasValue ? value : null);
    -            String valueAsString = percentilesAsString.get(key);
    -            if (hasValue && valueAsString != null) {
    -                builder.field(key + "_as_string", valueAsString);
    -            }
    -        }
    -        builder.endObject();
    -        return builder;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedPercentilesBucket.class.getSimpleName(),
    -        true,
    -        ParsedPercentilesBucket::new
    -    );
    -
    -    static {
    -        ParsedPercentiles.declarePercentilesFields(PARSER);
    -    }
    -
    -    public static ParsedPercentilesBucket fromXContent(XContentParser parser, String name) throws IOException {
    -        ParsedPercentilesBucket aggregation = PARSER.parse(parser, null);
    -        aggregation.setName(name);
    -        return aggregation;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedSimpleValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedSimpleValue.java
    deleted file mode 100644
    index 0d02ba6e5365b..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedSimpleValue.java
    +++ /dev/null
    @@ -1,50 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.pipeline;
    -
    -import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.io.IOException;
    -
    -public class ParsedSimpleValue extends ParsedSingleValueNumericMetricsAggregation implements SimpleValue {
    -
    -    @Override
    -    public String getType() {
    -        return InternalSimpleValue.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedSimpleValue.class.getSimpleName(),
    -        true,
    -        ParsedSimpleValue::new
    -    );
    -
    -    static {
    -        declareSingleValueFields(PARSER, Double.NaN);
    -    }
    -
    -    public static ParsedSimpleValue fromXContent(XContentParser parser, final String name) {
    -        ParsedSimpleValue simpleValue = PARSER.apply(parser, null);
    -        simpleValue.setName(name);
    -        return simpleValue;
    -    }
    -
    -    @Override
    -    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    -        boolean hasValue = Double.isNaN(value) == false;
    -        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
    -        if (hasValue && valueAsString != null) {
    -            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
    -        }
    -        return builder;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java
    deleted file mode 100644
    index a2a784acfdb34..0000000000000
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java
    +++ /dev/null
    @@ -1,37 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.search.aggregations.pipeline;
    -
    -import org.elasticsearch.search.aggregations.metrics.ParsedStats;
    -import org.elasticsearch.xcontent.ObjectParser;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -public class ParsedStatsBucket extends ParsedStats implements StatsBucket {
    -
    -    @Override
    -    public String getType() {
    -        return StatsBucketPipelineAggregationBuilder.NAME;
    -    }
    -
    -    private static final ObjectParser PARSER = new ObjectParser<>(
    -        ParsedStatsBucket.class.getSimpleName(),
    -        true,
    -        ParsedStatsBucket::new
    -    );
    -
    -    static {
    -        declareStatsFields(PARSER);
    -    }
    -
    -    public static ParsedStatsBucket fromXContent(XContentParser parser, final String name) {
    -        ParsedStatsBucket parsedStatsBucket = PARSER.apply(parser, null);
    -        parsedStatsBucket.setName(name);
    -        return parsedStatsBucket;
    -    }
    -}
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java
    index 7225d7652b3b8..c7eb662efebd5 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java
    @@ -85,7 +85,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe
                     double diff = thisBucketValue - lagValue;
     
                     List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false)
    -                    .map((p) -> (InternalAggregation) p)
                         .collect(Collectors.toCollection(ArrayList::new));
                     aggs.add(new InternalSimpleValue(name(), diff, formatter, metadata()));
                     newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java
    index 9c63e13afa039..7b82cd38881df 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java
    @@ -9,7 +9,6 @@
     package org.elasticsearch.search.aggregations.pipeline;
     
     import org.elasticsearch.search.aggregations.AggregationReduceContext;
    -import org.elasticsearch.search.aggregations.Aggregations;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
     
    @@ -30,5 +29,5 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe
             });
         }
     
    -    public abstract InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context);
    +    public abstract InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context);
     }
    diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java
    index c2aa26409f010..1e9d34671b2b6 100644
    --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java
    +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java
    @@ -29,6 +29,7 @@
     import org.elasticsearch.index.fielddata.IndexFieldData;
     import org.elasticsearch.index.mapper.DocCountFieldMapper;
     import org.elasticsearch.index.mapper.MappedFieldType;
    +import org.elasticsearch.index.mapper.MappingLookup;
     import org.elasticsearch.index.mapper.NestedLookup;
     import org.elasticsearch.index.query.QueryBuilder;
     import org.elasticsearch.index.query.Rewriteable;
    @@ -307,6 +308,14 @@ public final AggregationUsageService getUsageService() {
     
         public abstract Set sourcePath(String fullName);
     
    +    /**
    +     * Returns the MappingLookup for the index, if one is initialized.
    +     */
    +    @Nullable
    +    public MappingLookup getMappingLookup() {
    +        return null;
    +    }
    +
         /**
          * Does this index have a {@code _doc_count} field in any segment?
          */
    @@ -611,6 +620,11 @@ public Set sourcePath(String fullName) {
                 return context.sourcePath(fullName);
             }
     
    +        @Override
    +        public MappingLookup getMappingLookup() {
    +            return context.getMappingLookup();
    +        }
    +
             @Override
             public void close() {
                 /*
    diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
    index 0211e43933ec3..bc4b2a85bab68 100644
    --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
    +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
    @@ -219,7 +219,7 @@ public SearchSourceBuilder(StreamInput in) throws IOException {
             indexBoosts = in.readCollectionAsList(IndexBoost::new);
             minScore = in.readOptionalFloat();
             postQueryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 subSearchSourceBuilders = in.readCollectionAsList(SubSearchSourceBuilder::new);
             } else {
                 QueryBuilder queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
    @@ -289,7 +289,7 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeCollection(indexBoosts);
             out.writeOptionalFloat(minScore);
             out.writeOptionalNamedWriteable(postQueryBuilder);
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
                 out.writeCollection(subSearchSourceBuilders);
             } else if (out.getTransportVersion().before(TransportVersions.V_8_4_0) && subSearchSourceBuilders.size() >= 2) {
                 throw new IllegalArgumentException("cannot serialize [sub_searches] to version [" + out.getTransportVersion() + "]");
    diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java
    index f787e30644658..c77a5c3c09f81 100644
    --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java
    +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java
    @@ -102,11 +102,21 @@ private CollapseBuilder setField(String field) {
         }
     
         public CollapseBuilder setInnerHits(InnerHitBuilder innerHit) {
    +        if (innerHit.getName() == null) {
    +            throw new IllegalArgumentException("inner_hits must have a [name]; set the [name] field in the inner_hits definition");
    +        }
             this.innerHits = Collections.singletonList(innerHit);
             return this;
         }
     
         public CollapseBuilder setInnerHits(List innerHits) {
    +        if (innerHits != null) {
    +            for (InnerHitBuilder innerHit : innerHits) {
    +                if (innerHit.getName() == null) {
    +                    throw new IllegalArgumentException("inner_hits must have a [name]; set the [name] field in the inner_hits definition");
    +                }
    +            }
    +        }
             this.innerHits = innerHits;
             return this;
         }
    diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsKnnResults.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsKnnResults.java
    index 3bb6a002fb17f..616b773ad1def 100644
    --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsKnnResults.java
    +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsKnnResults.java
    @@ -16,7 +16,7 @@
     
     import java.io.IOException;
     
    -import static org.elasticsearch.TransportVersions.NESTED_KNN_VECTOR_QUERY_V;
    +import static org.elasticsearch.TransportVersions.V_8_11_X;
     
     public class DfsKnnResults implements Writeable {
         private final String nestedPath;
    @@ -29,7 +29,7 @@ public DfsKnnResults(String nestedPath, ScoreDoc[] scoreDocs) {
     
         public DfsKnnResults(StreamInput in) throws IOException {
             scoreDocs = in.readArray(Lucene::readScoreDoc, ScoreDoc[]::new);
    -        if (in.getTransportVersion().onOrAfter(NESTED_KNN_VECTOR_QUERY_V)) {
    +        if (in.getTransportVersion().onOrAfter(V_8_11_X)) {
                 nestedPath = in.readOptionalString();
             } else {
                 nestedPath = null;
    @@ -46,7 +46,7 @@ public ScoreDoc[] scoreDocs() {
     
         public void writeTo(StreamOutput out) throws IOException {
             out.writeArray(Lucene::writeScoreDoc, scoreDocs);
    -        if (out.getTransportVersion().onOrAfter(NESTED_KNN_VECTOR_QUERY_V)) {
    +        if (out.getTransportVersion().onOrAfter(V_8_11_X)) {
                 out.writeOptionalString(nestedPath);
             }
         }
    diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java
    index 5d3288408c99b..dab127e8b4e56 100644
    --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java
    +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java
    @@ -42,6 +42,8 @@
     import java.util.List;
     import java.util.Map;
     
    +import static org.elasticsearch.index.query.AbstractQueryBuilder.DEFAULT_BOOST;
    +
     /**
      * DFS phase of a search request, used to make scoring 100% accurate by collecting additional info from each shard before the query phase.
      * The additional information is used to better compare the scores coming from all the shards, which depend on local factors (e.g. idf).
    @@ -181,6 +183,8 @@ private static void executeKnnVectorQuery(SearchContext context) throws IOExcept
             SearchExecutionContext searchExecutionContext = context.getSearchExecutionContext();
             List knnSearch = context.request().source().knnSearch();
             List knnVectorQueryBuilders = knnSearch.stream().map(KnnSearchBuilder::toQueryBuilder).toList();
    +        // Since we apply boost during the DfsQueryPhase, we should not apply boost here:
    +        knnVectorQueryBuilders.forEach(knnVectorQueryBuilder -> knnVectorQueryBuilder.boost(DEFAULT_BOOST));
     
             if (context.request().getAliasFilter().getQueryBuilder() != null) {
                 for (KnnVectorQueryBuilder knnVectorQueryBuilder : knnVectorQueryBuilders) {
    diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
    index 91e4fb791f62d..c106d9b6f4cb2 100644
    --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
    +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
    @@ -82,6 +82,7 @@ public void execute(SearchContext context, int[] docIdsToLoad) {
                 // Only set the shardResults if building search hits was successful
                 if (hits != null) {
                     context.fetchResult().shardResult(hits, profileResult);
    +                hits.decRef();
                 }
             }
         }
    @@ -173,7 +174,7 @@ protected SearchHit nextDoc(int doc) throws IOException {
             }
     
             TotalHits totalHits = context.getTotalHits();
    -        return new SearchHits(hits, totalHits, context.getMaxScore());
    +        return SearchHits.unpooled(hits, totalHits, context.getMaxScore());
         }
     
         List getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) {
    @@ -247,11 +248,12 @@ private static HitContext prepareNonNestedHitContext(
     
             String id = idLoader.getId(subDocId);
             if (id == null) {
    -            SearchHit hit = new SearchHit(docId, null);
    +            // TODO: can we use pooled buffers here as well?
    +            SearchHit hit = SearchHit.unpooled(docId, null);
                 Source source = Source.lazy(lazyStoredSourceLoader(profiler, subReaderContext, subDocId));
                 return new HitContext(hit, subReaderContext, subDocId, Map.of(), source);
             } else {
    -            SearchHit hit = new SearchHit(docId, id);
    +            SearchHit hit = SearchHit.unpooled(docId, id);
                 Source source;
                 if (requiresSource) {
                     Timer timer = profiler.startLoadingSource();
    @@ -328,7 +330,7 @@ private static HitContext prepareNestedHitContext(
             assert nestedIdentity != null;
             Source nestedSource = nestedIdentity.extractSource(rootSource);
     
    -        SearchHit hit = new SearchHit(topDocId, rootId, nestedIdentity);
    +        SearchHit hit = SearchHit.unpooled(topDocId, rootId, nestedIdentity);
             return new HitContext(hit, subReaderContext, nestedInfo.doc(), childFieldLoader.storedFields(), nestedSource);
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java
    index ea5ab13c2e8ee..cc39113f2009f 100644
    --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java
    +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java
    @@ -70,6 +70,11 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde
                     searchHits[docs[i].index] = nextDoc(docs[i].docId);
                 }
             } catch (Exception e) {
    +            for (SearchHit searchHit : searchHits) {
    +                if (searchHit != null) {
    +                    searchHit.decRef();
    +                }
    +            }
                 throw new FetchPhaseExecutionException(shardTarget, "Error running fetch phase for doc [" + currentDoc + "]", e);
             }
             return searchHits;
    diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java
    index aa5c1f2cbd992..6cf924a239208 100644
    --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java
    +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java
    @@ -30,7 +30,12 @@ public final class FetchSearchResult extends SearchPhaseResult {
     
         private ProfileResult profileResult;
     
    -    private final RefCounted refCounted = LeakTracker.wrap(AbstractRefCounted.of(() -> hits = null));
    +    private final RefCounted refCounted = LeakTracker.wrap(AbstractRefCounted.of(() -> {
    +        if (hits != null) {
    +            hits.decRef();
    +            hits = null;
    +        }
    +    }));
     
         public FetchSearchResult() {}
     
    @@ -42,12 +47,13 @@ public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget)
         public FetchSearchResult(StreamInput in) throws IOException {
             super(in);
             contextId = new ShardSearchContextId(in);
    -        hits = new SearchHits(in);
    +        hits = SearchHits.readFrom(in, true);
             profileResult = in.readOptionalWriteable(ProfileResult::new);
         }
     
         @Override
         public void writeTo(StreamOutput out) throws IOException {
    +        assert hasReferences();
             contextId.writeTo(out);
             hits.writeTo(out);
             out.writeOptionalWriteable(profileResult);
    @@ -61,6 +67,7 @@ public FetchSearchResult fetchResult() {
         public void shardResult(SearchHits hits, ProfileResult profileResult) {
             assert assertNoSearchTarget(hits);
             this.hits = hits;
    +        hits.incRef();
             assert this.profileResult == null;
             this.profileResult = profileResult;
         }
    @@ -73,6 +80,7 @@ private static boolean assertNoSearchTarget(SearchHits hits) {
         }
     
         public SearchHits hits() {
    +        assert hasReferences();
             return hits;
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java
    index feb0547a32536..ccb54801472a6 100644
    --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java
    +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java
    @@ -103,7 +103,9 @@ private void hitExecute(Map innerHi
                         searchHitFields.sortValues(fieldDoc.fields, innerHitsContext.sort().formats);
                     }
                 }
    -            results.put(entry.getKey(), fetchResult.hits());
    +            var h = fetchResult.hits();
    +            results.put(entry.getKey(), h);
    +            h.mustIncRef();
             }
         }
     }
    diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
    index d6a3334dd035b..0263c6e83b17a 100644
    --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
    +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
    @@ -50,7 +50,6 @@
     import java.util.Arrays;
     import java.util.Collections;
     import java.util.Comparator;
    -import java.util.HashSet;
     import java.util.List;
     import java.util.Objects;
     import java.util.PriorityQueue;
    @@ -194,8 +193,9 @@ public void setAggregatedDfs(AggregatedDfs aggregatedDfs) {
     
         @Override
         public Query rewrite(Query original) throws IOException {
    +        Timer rewriteTimer = null;
             if (profiler != null) {
    -            profiler.startRewriteTime();
    +            rewriteTimer = profiler.startRewriteTime();
             }
             try {
                 return super.rewrite(original);
    @@ -204,7 +204,7 @@ public Query rewrite(Query original) throws IOException {
                 return new MatchNoDocsQuery("rewrite timed out");
             } finally {
                 if (profiler != null) {
    -                profiler.stopAndAddRewriteTime();
    +                profiler.stopAndAddRewriteTime(rewriteTimer);
                 }
             }
         }
    @@ -524,13 +524,12 @@ public DirectoryReader getDirectoryReader() {
     
         private static class MutableQueryTimeout implements ExitableDirectoryReader.QueryCancellation {
     
    -        private final Set<Runnable> runnables = new HashSet<>();
    +        private final List<Runnable> runnables = new ArrayList<>();
     
             private Runnable add(Runnable action) {
                 Objects.requireNonNull(action, "cancellation runnable should not be null");
    -            if (runnables.add(action) == false) {
    -                throw new IllegalArgumentException("Cancellation runnable already added");
    -            }
    +            assert runnables.contains(action) == false : "Cancellation runnable already added";
    +            runnables.add(action);
                 return action;
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
    index 18ae708d8fec3..2023ee2e8d4b6 100644
    --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
    +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
    @@ -284,8 +284,7 @@ public ShardSearchRequest(StreamInput in) throws IOException {
             numberOfShards = in.readVInt();
             scroll = in.readOptionalWriteable(Scroll::new);
             source = in.readOptionalWriteable(SearchSourceBuilder::new);
    -        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)
    -            && in.getTransportVersion().before(TransportVersions.V_8_500_020)) {
    +        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) && in.getTransportVersion().before(TransportVersions.V_8_9_X)) {
                 // to deserialize between the 8.8 and 8.500.020 version we need to translate
                 // the rank queries into sub searches if we are ranking; if there are no rank queries
                 // we deserialize the empty list and do nothing
    @@ -360,8 +359,7 @@ protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOExce
             }
             out.writeOptionalWriteable(scroll);
             out.writeOptionalWriteable(source);
    -        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)
    -            && out.getTransportVersion().before(TransportVersions.V_8_500_020)) {
    +        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) && out.getTransportVersion().before(TransportVersions.V_8_9_X)) {
                 // to serialize between the 8.8 and 8.500.020 version we need to translate
                 // the sub searches into rank queries if we are ranking, otherwise, we
                 // ignore this because linear combination will have multiple sub searches in
    diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java b/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java
    index 686e628d7faef..14ff1aaa8a34f 100644
    --- a/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java
    +++ b/server/src/main/java/org/elasticsearch/search/profile/query/InternalQueryProfileTree.java
    @@ -11,6 +11,9 @@
     import org.apache.lucene.search.Query;
     import org.elasticsearch.search.profile.AbstractInternalProfileTree;
     import org.elasticsearch.search.profile.ProfileResult;
    +import org.elasticsearch.search.profile.Timer;
    +
    +import java.util.concurrent.atomic.AtomicLong;
     
     /**
      * This class tracks the dependency tree for queries (scoring and rewriting) and
    @@ -20,8 +23,7 @@
     final class InternalQueryProfileTree extends AbstractInternalProfileTree<QueryProfileBreakdown, Query> {
     
         /** Rewrite time */
    -    private long rewriteTime;
    -    private long rewriteScratch;
    +    private final AtomicLong rewriteTime = new AtomicLong(0L);
     
         @Override
         protected QueryProfileBreakdown createProfileBreakdown() {
    @@ -44,11 +46,12 @@ protected String getDescriptionFromElement(Query query) {
         }
     
         /**
    -     * Begin timing a query for a specific Timing context
    +     * Begin timing a query for a specific Timing context and return the running timer
          */
    -    public void startRewriteTime() {
    -        assert rewriteScratch == 0;
    -        rewriteScratch = System.nanoTime();
    +    public Timer startRewriteTime() {
    +        Timer timer = new Timer();
    +        timer.start();
    +        return timer;
         }
     
         /**
    @@ -59,14 +62,15 @@ public void startRewriteTime() {
          *
          * @return          The elapsed time
          */
    -    public long stopAndAddRewriteTime() {
    -        long time = Math.max(1, System.nanoTime() - rewriteScratch);
    -        rewriteTime += time;
    -        rewriteScratch = 0;
    -        return time;
    +    public long stopAndAddRewriteTime(Timer timer) {
    +        timer.stop();
    +        assert timer.getCount() == 1L : "stopAndAddRewriteTime() called without a matching startRewriteTime()";
    +        long time = Math.max(1, timer.getApproximateTiming());
    +        return rewriteTime.addAndGet(time);
         }
     
         public long getRewriteTime() {
    -        return rewriteTime;
    +        return rewriteTime.get();
         }
    +
     }
    diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java
    index a40b1284238b2..e1933f4552485 100644
    --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java
    +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java
    @@ -10,8 +10,9 @@
     
     import org.apache.lucene.search.Query;
     import org.elasticsearch.search.profile.AbstractProfiler;
    +import org.elasticsearch.search.profile.Timer;
     
    -import java.util.Objects;
    +import static java.util.Objects.requireNonNull;
     
     /**
      * This class acts as a thread-local storage for profiling a query.  It also
    @@ -50,15 +51,15 @@ public void setCollectorResult(CollectorResult collectorResult) {
             if (this.collectorResult != null) {
                 throw new IllegalStateException("The collector result can only be set once.");
             }
    -        this.collectorResult = Objects.requireNonNull(collectorResult);
    +        this.collectorResult = requireNonNull(collectorResult);
         }
     
         /**
          * Begin timing the rewrite phase of a request.  All rewrites are accumulated together into a
          * single metric
          */
    -    public void startRewriteTime() {
    -        ((InternalQueryProfileTree) profileTree).startRewriteTime();
    +    public Timer startRewriteTime() {
    +        return ((InternalQueryProfileTree) profileTree).startRewriteTime();
         }
     
         /**
    @@ -67,8 +68,8 @@ public void startRewriteTime() {
          *
          * @return cumulative rewrite time
          */
    -    public long stopAndAddRewriteTime() {
    -        return ((InternalQueryProfileTree) profileTree).stopAndAddRewriteTime();
    +    public long stopAndAddRewriteTime(Timer rewriteTimer) {
    +        return ((InternalQueryProfileTree) profileTree).stopAndAddRewriteTime(requireNonNull(rewriteTimer));
         }
     
         /**
    diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java
    index 76ee7e09ad870..4c42daba22b7a 100644
    --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java
    +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java
    @@ -73,6 +73,8 @@ public static RescorerBuilder parseFromXContent(XContentParser parser, Consum
             RescorerBuilder<?> rescorer = null;
             Integer windowSize = null;
             XContentParser.Token token;
    +        String rescorerType = null;
    +
             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                 if (token == XContentParser.Token.FIELD_NAME) {
                     fieldName = parser.currentName();
    @@ -83,8 +85,11 @@ public static RescorerBuilder parseFromXContent(XContentParser parser, Consum
                         throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]");
                     }
                 } else if (token == XContentParser.Token.START_OBJECT) {
    -                rescorer = parser.namedObject(RescorerBuilder.class, fieldName, null);
    -                rescorerNameConsumer.accept(fieldName);
    +                if (fieldName != null) {
    +                    rescorer = parser.namedObject(RescorerBuilder.class, fieldName, null);
    +                    rescorerNameConsumer.accept(fieldName);
    +                    rescorerType = fieldName;
    +                }
                 } else {
                     throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]");
                 }
    @@ -92,9 +97,13 @@ public static RescorerBuilder parseFromXContent(XContentParser parser, Consum
             if (rescorer == null) {
                 throw new ParsingException(parser.getTokenLocation(), "missing rescore type");
             }
    +
             if (windowSize != null) {
                 rescorer.windowSize(windowSize.intValue());
    +        } else if (rescorer.isWindowSizeRequired()) {
    +            throw new ParsingException(parser.getTokenLocation(), "window_size is required for rescorer of type [" + rescorerType + "]");
             }
    +
             return rescorer;
         }
     
    @@ -111,11 +120,21 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
     
         protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException;
     
    +    /**
    +     * Indicate if the window_size is a required parameter for the rescorer.
    +     */
    +    protected boolean isWindowSizeRequired() {
    +        return false;
    +    }
    +
         /**
          * Build the {@linkplain RescoreContext} that will be used to actually
          * execute the rescore against a particular shard.
          */
         public final RescoreContext buildContext(SearchExecutionContext context) throws IOException {
    +        if (isWindowSizeRequired()) {
    +            assert windowSize != null;
    +        }
             int finalWindowSize = windowSize == null ? DEFAULT_WINDOW_SIZE : windowSize;
             RescoreContext rescoreContext = innerBuildContext(finalWindowSize, context);
             return rescoreContext;
    diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
    index 40ff9c6eaf6ee..7210c35d961ac 100644
    --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
    +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
    @@ -271,7 +271,9 @@ public Option(int docID, Text text, float score, Map> contex
                 public Option(StreamInput in) throws IOException {
                     super(in);
                     this.doc = Lucene.readScoreDoc(in);
    -                this.hit = in.readOptionalWriteable(SearchHit::readFrom);
    +                if (in.readBoolean()) {
    +                    this.hit = SearchHit.readFrom(in, false);
    +                }
                     int contextSize = in.readInt();
                     this.contexts = Maps.newLinkedHashMapWithExpectedSize(contextSize);
                     for (int i = 0; i < contextSize; i++) {
    @@ -309,7 +311,7 @@ public void setShardIndex(int shardIndex) {
                 }
     
                 public void setHit(SearchHit hit) {
    -                this.hit = hit;
    +                this.hit = hit == null ? null : hit.asUnpooled();
                 }
     
                 @Override
    diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java
    new file mode 100644
    index 0000000000000..d292f61dcb085
    --- /dev/null
    +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java
    @@ -0,0 +1,116 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.search.vectors;
    +
    +import org.apache.lucene.search.Query;
    +import org.elasticsearch.TransportVersion;
    +import org.elasticsearch.TransportVersions;
    +import org.elasticsearch.common.io.stream.StreamInput;
    +import org.elasticsearch.common.io.stream.StreamOutput;
    +import org.elasticsearch.index.mapper.MappedFieldType;
    +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
    +import org.elasticsearch.index.query.AbstractQueryBuilder;
    +import org.elasticsearch.index.query.QueryBuilder;
    +import org.elasticsearch.index.query.QueryRewriteContext;
    +import org.elasticsearch.index.query.SearchExecutionContext;
    +import org.elasticsearch.xcontent.XContentBuilder;
    +
    +import java.io.IOException;
    +import java.util.Arrays;
    +import java.util.Objects;
    +
    +/**
    + * Exact knn query builder. Will iterate and score all documents that have the provided knn field in the index.
    + * Useful in inner hits scoring scenarios.
    + */
    +public class ExactKnnQueryBuilder extends AbstractQueryBuilder<ExactKnnQueryBuilder> {
    +    public static final String NAME = "exact_knn";
    +    private final String field;
    +    private final float[] query;
    +
    +    /**
    +     * Creates a query builder.
    +     *
    +     * @param query    the query vector
    +     * @param field    the field that was used for the kNN query
    +     */
    +    public ExactKnnQueryBuilder(float[] query, String field) {
    +        this.query = query;
    +        this.field = field;
    +    }
    +
    +    public ExactKnnQueryBuilder(StreamInput in) throws IOException {
    +        super(in);
    +        this.query = in.readFloatArray();
    +        this.field = in.readString();
    +    }
    +
    +    String getField() {
    +        return field;
    +    }
    +
    +    float[] getQuery() {
    +        return query;
    +    }
    +
    +    @Override
    +    public String getWriteableName() {
    +        return NAME;
    +    }
    +
    +    @Override
    +    protected void doWriteTo(StreamOutput out) throws IOException {
    +        out.writeFloatArray(query);
    +        out.writeString(field);
    +    }
    +
    +    @Override
    +    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
    +        builder.startObject(NAME);
    +        builder.field("query", query);
    +        builder.field("field", field);
    +        boostAndQueryNameToXContent(builder);
    +        builder.endObject();
    +    }
    +
    +    @Override
    +    protected Query doToQuery(SearchExecutionContext context) throws IOException {
    +        final MappedFieldType fieldType = context.getFieldType(field);
    +        if (fieldType == null) {
    +            throw new IllegalArgumentException("field [" + field + "] does not exist in the mapping");
    +        }
    +        if (fieldType instanceof DenseVectorFieldMapper.DenseVectorFieldType == false) {
    +            throw new IllegalArgumentException(
    +                "[" + NAME + "] queries are only supported on [" + DenseVectorFieldMapper.CONTENT_TYPE + "] fields"
    +            );
    +        }
    +        final DenseVectorFieldMapper.DenseVectorFieldType vectorFieldType = (DenseVectorFieldMapper.DenseVectorFieldType) fieldType;
    +        return vectorFieldType.createExactKnnQuery(query);
    +    }
    +
    +    @Override
    +    protected boolean doEquals(ExactKnnQueryBuilder other) {
    +        return field.equals(other.field) && Arrays.equals(query, other.query);
    +    }
    +
    +    @Override
    +    protected int doHashCode() {
    +        return Objects.hash(field, Arrays.hashCode(query));
    +    }
    +
    +    @Override
    +    protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
    +        return this;
    +    }
    +
    +    @Override
    +    public TransportVersion getMinimalSupportedVersion() {
    +        return TransportVersions.NESTED_KNN_MORE_INNER_HITS;
    +    }
    +}
    diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java
    index 13ca1d3dc1db2..ea9b2df942808 100644
    --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java
    +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java
    @@ -35,6 +35,8 @@
     public class KnnScoreDocQueryBuilder extends AbstractQueryBuilder<KnnScoreDocQueryBuilder> {
         public static final String NAME = "knn_score_doc";
         private final ScoreDoc[] scoreDocs;
    +    private final String fieldName;
    +    private final float[] queryVector;
     
         /**
          * Creates a query builder.
    @@ -42,13 +44,26 @@ public class KnnScoreDocQueryBuilder extends AbstractQueryBuilder rewrittenQueries = new ArrayList<>(filterQueries.size());
             for (QueryBuilder query : filterQueries) {
    @@ -260,6 +263,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException {
     
             DenseVectorFieldType vectorFieldType = (DenseVectorFieldType) fieldType;
             String parentPath = context.nestedLookup().getNestedParent(fieldName);
    +
             if (parentPath != null) {
                 NestedObjectMapper originalObjectMapper = context.nestedScope().getObjectMapper();
                 if (originalObjectMapper != null) {
    diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java
    index 74d3c6a084217..6f6d62d7677d8 100644
    --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java
    +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java
    @@ -90,7 +90,6 @@
     import java.util.HashSet;
     import java.util.LinkedHashMap;
     import java.util.List;
    -import java.util.Locale;
     import java.util.Map;
     import java.util.Objects;
     import java.util.Optional;
    @@ -524,12 +523,12 @@ static void refreshRepositoryUuids(boolean enabled, RepositoriesService reposito
                             ActionListener.releaseAfter(new ActionListener<>() {
                                 @Override
                                 public void onResponse(RepositoryData repositoryData) {
    -                                logger.debug(() -> format("repository UUID [{}] refresh completed", repositoryName));
    +                                logger.debug(() -> format("repository UUID [%s] refresh completed", repositoryName));
                                 }
     
                                 @Override
                                 public void onFailure(Exception e) {
    -                                logger.debug(() -> format("repository UUID [{}] refresh failed", repositoryName), e);
    +                                logger.debug(() -> format("repository UUID [%s] refresh failed", repositoryName), e);
                                 }
                             }, refs.acquire())
                         );
    @@ -1121,8 +1120,7 @@ private static IndexMetadata updateIndexSettings(
                     if (Objects.equals(previous, changed) == false) {
                         throw new SnapshotRestoreException(
                             snapshot,
    -                        String.format(
    -                            Locale.ROOT,
    +                        format(
                                 "cannot change value of [%s] when restoring searchable snapshot [%s:%s] as index %s",
                                 SEARCHABLE_SNAPSHOTS_DELETE_SNAPSHOT_ON_INDEX_DELETION,
                                 snapshot.getRepository(),
    @@ -1761,8 +1759,7 @@ private static void ensureSearchableSnapshotsRestorable(
                     throw new SnapshotRestoreException(
                         repositoryName,
                         snapshotInfo.snapshotId().getName(),
    -                    String.format(
    -                        Locale.ROOT,
    +                    format(
                             "cannot mount snapshot [%s/%s:%s] as index [%s] with the deletion of snapshot on index removal enabled "
                                 + "[index.store.snapshot.delete_searchable_snapshot: true]; snapshot contains [%d] indices instead of 1.",
                             repositoryName,
    @@ -1796,8 +1793,7 @@ private static void ensureSearchableSnapshotsRestorable(
                         throw new SnapshotRestoreException(
                             repositoryName,
                             snapshotInfo.snapshotId().getName(),
    -                        String.format(
    -                            Locale.ROOT,
    +                        format(
                                 "cannot mount snapshot [%s/%s:%s] as index [%s] with [index.store.snapshot.delete_searchable_snapshot: %b]; "
                                     + "another index %s is mounted with [index.store.snapshot.delete_searchable_snapshot: %b].",
                                 repositoryName,
    diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
    index f973d456a6b79..bbabfca866a69 100644
    --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
    +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
    @@ -450,7 +450,7 @@ private void startCloning(Repository repository, SnapshotsInProgress.Entry clone
                 endingSnapshots.add(targetSnapshot);
                 initializingClones.remove(targetSnapshot);
                 logger.info(() -> "Failed to start snapshot clone [" + cloneEntry + "]", e);
    -            removeFailedSnapshotFromClusterState(targetSnapshot, e, null);
    +            removeFailedSnapshotFromClusterState(targetSnapshot, e, null, ShardGenerations.EMPTY);
             };
     
             // 1. step, load SnapshotInfo to make sure that source snapshot was successful for the indices we want to clone
    @@ -1312,7 +1312,12 @@ private void endSnapshot(SnapshotsInProgress.Entry entry, Metadata metadata, @Nu
             if (entry.isClone() && entry.state() == State.FAILED) {
                 logger.debug("Removing failed snapshot clone [{}] from cluster state", entry);
                 if (newFinalization) {
    -                removeFailedSnapshotFromClusterState(snapshot, new SnapshotException(snapshot, entry.failure()), null);
    +                removeFailedSnapshotFromClusterState(
    +                    snapshot,
    +                    new SnapshotException(snapshot, entry.failure()),
    +                    null,
    +                    ShardGenerations.EMPTY
    +                );
                 }
                 return;
             }
    @@ -1496,7 +1501,15 @@ private void finalizeSnapshotEntry(Snapshot snapshot, Metadata metadata, Reposit
                                 // a fatal like e.g. this node stopped being the master node
                                 snapshotListeners.onResponse(endAndGetListenersToResolve(snapshot));
                                 runNextQueuedOperation(updatedRepositoryData, repository, true);
    -                        }, e -> handleFinalizationFailure(e, snapshot, repositoryData)),
    +                        },
    +                            e -> handleFinalizationFailure(
    +                                e,
    +                                snapshot,
    +                                repositoryData,
    +                                // we might have written the new root blob before failing here, so we must use the updated shardGenerations
    +                                shardGenerations
    +                            )
    +                        ),
                             snInfo -> snapshotListeners.addListener(new ActionListener<>() {
                                 @Override
                                 public void onResponse(List> actionListeners) {
    @@ -1512,11 +1525,20 @@ public void onFailure(Exception e) {
                             })
                         )
                     );
    -            }, e -> handleFinalizationFailure(e, snapshot, repositoryData)));
    +            },
    +                e -> handleFinalizationFailure(
    +                    e,
    +                    snapshot,
    +                    repositoryData,
    +                    // a failure here means the root blob was not updated, but the updated shard generation blobs are all in place so we can
    +                    // use the updated shardGenerations for all pending shard snapshots
    +                    shardGenerations
    +                )
    +            ));
             } catch (Exception e) {
                 logger.error(Strings.format("unexpected failure finalizing %s", snapshot), e);
                 assert false : new AssertionError("unexpected failure finalizing " + snapshot, e);
    -            handleFinalizationFailure(e, snapshot, repositoryData);
    +            handleFinalizationFailure(e, snapshot, repositoryData, ShardGenerations.EMPTY);
             }
         }
     
    @@ -1568,7 +1590,12 @@ private List> endAndGetListenersToResolve(Snapshot
          * @param snapshot       snapshot that failed to finalize
          * @param repositoryData current repository data for the snapshot's repository
          */
    -    private void handleFinalizationFailure(Exception e, Snapshot snapshot, RepositoryData repositoryData) {
    +    private void handleFinalizationFailure(
    +        Exception e,
    +        Snapshot snapshot,
    +        RepositoryData repositoryData,
    +        ShardGenerations shardGenerations
    +    ) {
             if (ExceptionsHelper.unwrap(e, NotMasterException.class, FailedToCommitClusterStateException.class) != null) {
                 // Failure due to not being master any more, don't try to remove snapshot from cluster state the next master
                 // will try ending this snapshot again
    @@ -1581,7 +1608,7 @@ private void handleFinalizationFailure(Exception e, Snapshot snapshot, Repositor
                 failAllListenersOnMasterFailOver(e);
             } else {
                 logger.warn(() -> "[" + snapshot + "] failed to finalize snapshot", e);
    -            removeFailedSnapshotFromClusterState(snapshot, e, repositoryData);
    +            removeFailedSnapshotFromClusterState(snapshot, e, repositoryData, shardGenerations);
             }
         }
     
    @@ -1701,7 +1728,7 @@ private static Tuple> read
          * @param snapshot snapshot for which to remove the snapshot operation
          * @return updated cluster state
          */
    -    public static ClusterState stateWithoutSnapshot(ClusterState state, Snapshot snapshot) {
    +    public static ClusterState stateWithoutSnapshot(ClusterState state, Snapshot snapshot, ShardGenerations shardGenerations) {
             final SnapshotsInProgress snapshots = SnapshotsInProgress.get(state);
             ClusterState result = state;
             int indexOfEntry = -1;
    @@ -1762,7 +1789,8 @@ public static ClusterState stateWithoutSnapshot(ClusterState state, Snapshot sna
                                 final ShardSnapshotStatus shardState = finishedShardEntry.getValue();
                                 final RepositoryShardId repositoryShardId = finishedShardEntry.getKey();
                                 if (shardState.state() != ShardState.SUCCESS
    -                                || previousEntry.shardsByRepoShardId().containsKey(repositoryShardId) == false) {
    +                                || previousEntry.shardsByRepoShardId().containsKey(repositoryShardId) == false
    +                                || shardGenerations.hasShardGen(finishedShardEntry.getKey()) == false) {
                                     continue;
                                 }
                                 updatedShardAssignments = maybeAddUpdatedAssignment(
    @@ -1779,7 +1807,8 @@ public static ClusterState stateWithoutSnapshot(ClusterState state, Snapshot sna
                                 .entrySet()) {
                                 final ShardSnapshotStatus shardState = finishedShardEntry.getValue();
                                 if (shardState.state() == ShardState.SUCCESS
    -                                && previousEntry.shardsByRepoShardId().containsKey(finishedShardEntry.getKey())) {
    +                                && previousEntry.shardsByRepoShardId().containsKey(finishedShardEntry.getKey())
    +                                && shardGenerations.hasShardGen(finishedShardEntry.getKey())) {
                                     updatedShardAssignments = maybeAddUpdatedAssignment(
                                         updatedShardAssignments,
                                         shardState,
    @@ -1862,13 +1891,18 @@ private static  ImmutableOpenMap.Builder maybeAddUpda
          * @param repositoryData repository data if the next finalization operation on the repository should be attempted or {@code null} if
          *                       no further actions should be executed
          */
    -    private void removeFailedSnapshotFromClusterState(Snapshot snapshot, Exception failure, @Nullable RepositoryData repositoryData) {
    +    private void removeFailedSnapshotFromClusterState(
    +        Snapshot snapshot,
    +        Exception failure,
    +        @Nullable RepositoryData repositoryData,
    +        ShardGenerations shardGenerations
    +    ) {
             assert failure != null : "Failure must be supplied";
             submitUnbatchedTask(REMOVE_SNAPSHOT_METADATA_TASK_SOURCE, new ClusterStateUpdateTask() {
     
                 @Override
                 public ClusterState execute(ClusterState currentState) {
    -                final ClusterState updatedState = stateWithoutSnapshot(currentState, snapshot);
    +                final ClusterState updatedState = stateWithoutSnapshot(currentState, snapshot, shardGenerations);
                     assert updatedState == currentState || endingSnapshots.contains(snapshot)
                         : "did not track [" + snapshot + "] in ending snapshots while removing it from the cluster state";
                     // now check if there are any delete operations that refer to the just failed snapshot and remove the snapshot from them
    diff --git a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
    index 16eb7b7b2fb0f..8a0aa2033a30e 100644
    --- a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
    +++ b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
    @@ -85,9 +85,11 @@ protected void onCancelled() {}
         /**
          * Throws a {@link TaskCancelledException} if this task has been cancelled, otherwise does nothing.
          */
    -    public final synchronized void ensureNotCancelled() {
    +    public final void ensureNotCancelled() {
             if (isCancelled()) {
    -            throw getTaskCancelledException();
    +            synchronized (this) {
    +                throw getTaskCancelledException();
    +            }
             }
         }
     
    diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java
    index 6ab95072727c0..419f2d0726880 100644
    --- a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java
    +++ b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java
    @@ -25,7 +25,6 @@
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.common.util.concurrent.EsExecutors;
     import org.elasticsearch.common.util.concurrent.ListenableFuture;
    -import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.transport.NodeDisconnectedException;
     import org.elasticsearch.transport.NodeNotConnectedException;
     import org.elasticsearch.transport.Transport;
    @@ -186,7 +185,7 @@ private void setBanOnChildConnections(
                     TransportRequestOptions.EMPTY,
                     new TransportResponseHandler.Empty() {
                         @Override
    -                    public Executor executor(ThreadPool threadPool) {
    +                    public Executor executor() {
                             return TransportResponseHandler.TRANSPORT_WORKER;
                         }
     
    @@ -238,7 +237,7 @@ private void removeBanOnChildConnections(CancellableTask task, Collection> instruments;
    +
         @SuppressWarnings("rawtypes")
         public Collection builders() {
             return Collections.unmodifiableCollection(builders.values());
    @@ -179,7 +194,7 @@ public Collection builders() {
         );
     
         @SuppressWarnings({ "rawtypes", "unchecked" })
    -    public ThreadPool(final Settings settings, final ExecutorBuilder... customBuilders) {
    +    public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final ExecutorBuilder... customBuilders) {
             assert Node.NODE_NAME_SETTING.exists(settings);
     
             final Map builders = new HashMap<>();
    @@ -188,6 +203,7 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... customBui
             final int halfProcMaxAt5 = halfAllocatedProcessorsMaxFive(allocatedProcessors);
             final int halfProcMaxAt10 = halfAllocatedProcessorsMaxTen(allocatedProcessors);
             final int genericThreadPoolMax = boundedBy(4 * allocatedProcessors, 128, 512);
    +        final Map> instruments = new HashMap<>();
     
             builders.put(
                 Names.GENERIC,
    @@ -228,7 +244,9 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... customBui
                 new ScalingExecutorBuilder(Names.MANAGEMENT, 1, boundedBy(allocatedProcessors, 1, 5), TimeValue.timeValueMinutes(5), false)
             );
             builders.put(Names.FLUSH, new ScalingExecutorBuilder(Names.FLUSH, 1, halfProcMaxAt5, TimeValue.timeValueMinutes(5), false));
    -        builders.put(Names.REFRESH, new ScalingExecutorBuilder(Names.REFRESH, 1, halfProcMaxAt10, TimeValue.timeValueMinutes(5), false));
    +        // TODO: remove (or refine) this temporary stateless custom refresh pool sizing once ES-7631 is solved.
    +        final int refreshThreads = DiscoveryNode.isStateless(settings) ? allocatedProcessors : halfProcMaxAt10;
    +        builders.put(Names.REFRESH, new ScalingExecutorBuilder(Names.REFRESH, 1, refreshThreads, TimeValue.timeValueMinutes(5), false));
             builders.put(Names.WARMER, new ScalingExecutorBuilder(Names.WARMER, 1, halfProcMaxAt5, TimeValue.timeValueMinutes(5), false));
             final int maxSnapshotCores = getMaxSnapshotThreadPoolSize(allocatedProcessors);
             builders.put(Names.SNAPSHOT, new ScalingExecutorBuilder(Names.SNAPSHOT, 1, maxSnapshotCores, TimeValue.timeValueMinutes(5), false));
    @@ -304,7 +322,8 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... customBui
     
             executors.put(Names.SAME, new ExecutorHolder(EsExecutors.DIRECT_EXECUTOR_SERVICE, new Info(Names.SAME, ThreadPoolType.DIRECT)));
             this.executors = Map.copyOf(executors);
    -
    +        this.executors.forEach((k, v) -> instruments.put(k, setupMetrics(meterRegistry, k, v)));
    +        this.instruments = instruments;
             final List infos = executors.values()
                 .stream()
                 .filter(holder -> holder.info.getName().equals("same") == false)
    @@ -321,6 +340,59 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... customBui
             this.cachedTimeThread.start();
         }
     
    +    private static ArrayList setupMetrics(MeterRegistry meterRegistry, String name, ExecutorHolder holder) {
    +        Map at = Map.of();
    +        ArrayList instruments = new ArrayList<>();
    +        if (holder.executor() instanceof ThreadPoolExecutor threadPoolExecutor) {
    +            String prefix = THREAD_POOL_METRIC_PREFIX + name;
    +            instruments.add(
    +                meterRegistry.registerLongGauge(
    +                    prefix + THREAD_POOL_METRIC_NAME_CURRENT,
    +                    "number of threads for " + name,
    +                    "count",
    +                    () -> new LongWithAttributes(threadPoolExecutor.getPoolSize(), at)
    +                )
    +            );
    +            instruments.add(
    +                meterRegistry.registerLongGauge(
    +                    prefix + THREAD_POOL_METRIC_NAME_QUEUE,
    +                    "number queue size for " + name,
    +                    "count",
    +                    () -> new LongWithAttributes(threadPoolExecutor.getQueue().size(), at)
    +                )
    +            );
    +            instruments.add(
    +                meterRegistry.registerLongGauge(
    +                    prefix + THREAD_POOL_METRIC_NAME_ACTIVE,
    +                    "number of active threads for " + name,
    +                    "count",
    +                    () -> new LongWithAttributes(threadPoolExecutor.getActiveCount(), at)
    +                )
    +            );
    +            instruments.add(
    +                meterRegistry.registerLongGauge(
    +                    prefix + THREAD_POOL_METRIC_NAME_LARGEST,
    +                    "largest pool size for " + name,
    +                    "count",
    +                    () -> new LongWithAttributes(threadPoolExecutor.getLargestPoolSize(), at)
    +                )
    +            );
    +            instruments.add(
    +                meterRegistry.registerLongAsyncCounter(
    +                    prefix + THREAD_POOL_METRIC_NAME_COMPLETED,
    +                    "number of completed threads for " + name,
    +                    "count",
    +                    () -> new LongWithAttributes(threadPoolExecutor.getCompletedTaskCount(), at)
    +                )
    +            );
    +            RejectedExecutionHandler rejectedExecutionHandler = threadPoolExecutor.getRejectedExecutionHandler();
    +            if (rejectedExecutionHandler instanceof EsRejectedExecutionHandler handler) {
    +                handler.registerCounter(meterRegistry, prefix, name);
    +            }
    +        }
    +        return instruments;
    +    }
    +
         // for subclassing by tests that don't actually use any of the machinery that the regular constructor sets up
         protected ThreadPool() {
             this.builders = Map.of();
    @@ -538,11 +610,33 @@ protected final void stopCachedTimeThread() {
             cachedTimeThread.interrupt();
         }
     
    +    private void closeMetrics(ExecutorHolder executor) {
    +        if (this.instruments.containsKey(executor.info.getName())) {
    +            this.instruments.get(executor.info.getName()).forEach((instrument) -> {
    +                if (instrument instanceof LongAsyncCounter longasynccounter) {
    +                    try {
    +                        longasynccounter.close();
    +                    } catch (Exception e) {
    +                        logger.warn(format("Failed to close LongAsyncCounter for %s. %s", executor.info.getName(), e.getMessage()), e);
    +                    }
    +                } else if (instrument instanceof LongGauge longgauge) {
    +                    try {
    +                        longgauge.close();
    +                    } catch (Exception e) {
    +                        logger.warn(format("Failed to close LongGauge for %s. %s", executor.info.getName(), e.getMessage()), e);
    +                    }
    +                }
    +            });
    +        }
    +        this.instruments.remove(executor.info.getName());
    +    }
    +
         public void shutdown() {
             stopCachedTimeThread();
             scheduler.shutdown();
             for (ExecutorHolder executor : executors.values()) {
                 if (executor.executor() instanceof ThreadPoolExecutor) {
    +                closeMetrics(executor);
                     executor.executor().shutdown();
                 }
             }
    @@ -553,6 +647,7 @@ public void shutdownNow() {
             scheduler.shutdownNow();
             for (ExecutorHolder executor : executors.values()) {
                 if (executor.executor() instanceof ThreadPoolExecutor) {
    +                closeMetrics(executor);
                     executor.executor().shutdownNow();
                 }
             }
    @@ -562,6 +657,7 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE
             boolean result = scheduler.awaitTermination(timeout, unit);
             for (ExecutorHolder executor : executors.values()) {
                 if (executor.executor() instanceof ThreadPoolExecutor) {
    +                closeMetrics(executor);
                     result &= executor.executor().awaitTermination(timeout, unit);
                 }
             }
    diff --git a/server/src/main/java/org/elasticsearch/transport/ForkingResponseHandlerRunnable.java b/server/src/main/java/org/elasticsearch/transport/ForkingResponseHandlerRunnable.java
    index 8fee438053137..121dc433862f0 100644
    --- a/server/src/main/java/org/elasticsearch/transport/ForkingResponseHandlerRunnable.java
    +++ b/server/src/main/java/org/elasticsearch/transport/ForkingResponseHandlerRunnable.java
    @@ -13,7 +13,6 @@
     import org.elasticsearch.common.util.concurrent.AbstractRunnable;
     import org.elasticsearch.common.util.concurrent.EsExecutors;
     import org.elasticsearch.core.Nullable;
    -import org.elasticsearch.threadpool.ThreadPool;
     
     import static org.elasticsearch.core.Strings.format;
     
    @@ -30,12 +29,8 @@ abstract class ForkingResponseHandlerRunnable extends AbstractRunnable {
         @Nullable
         private final TransportException transportException;
     
    -    ForkingResponseHandlerRunnable(
    -        TransportResponseHandler handler,
    -        @Nullable TransportException transportException,
    -        ThreadPool threadPool
    -    ) {
    -        assert handler.executor(threadPool) != EsExecutors.DIRECT_EXECUTOR_SERVICE : "forking handler required, but got " + handler;
    +    ForkingResponseHandlerRunnable(TransportResponseHandler handler, @Nullable TransportException transportException) {
    +        assert handler.executor() != EsExecutors.DIRECT_EXECUTOR_SERVICE : "forking handler required, but got " + handler;
             this.handler = handler;
             this.transportException = transportException;
         }
    diff --git a/server/src/main/java/org/elasticsearch/transport/InboundHandler.java b/server/src/main/java/org/elasticsearch/transport/InboundHandler.java
    index 1686213139722..babea8c529d85 100644
    --- a/server/src/main/java/org/elasticsearch/transport/InboundHandler.java
    +++ b/server/src/main/java/org/elasticsearch/transport/InboundHandler.java
    @@ -376,7 +376,7 @@ private  void handleResponse(
             final TransportResponseHandler handler,
             final InboundMessage inboundMessage
         ) {
    -        final var executor = handler.executor(threadPool);
    +        final var executor = handler.executor();
             if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
                 // no need to provide a buffer release here, we never escape the buffer when handling directly
                 doHandleResponse(handler, remoteAddress, stream, inboundMessage.getHeader(), () -> {});
    @@ -384,7 +384,7 @@ private  void handleResponse(
                 inboundMessage.mustIncRef();
                 // release buffer once we deserialize the message, but have a fail-safe in #onAfter below in case that didn't work out
                 final Releasable releaseBuffer = Releasables.releaseOnce(inboundMessage::decRef);
    -            executor.execute(new ForkingResponseHandlerRunnable(handler, null, threadPool) {
    +            executor.execute(new ForkingResponseHandlerRunnable(handler, null) {
                     @Override
                     protected void doRun() {
                         doHandleResponse(handler, remoteAddress, stream, inboundMessage.getHeader(), releaseBuffer);
    @@ -457,11 +457,11 @@ private void handlerResponseError(StreamInput stream, InboundMessage message, fi
         }
     
         private void handleException(final TransportResponseHandler handler, TransportException transportException) {
    -        final var executor = handler.executor(threadPool);
    +        final var executor = handler.executor();
             if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
                 doHandleException(handler, transportException);
             } else {
    -            executor.execute(new ForkingResponseHandlerRunnable(handler, transportException, threadPool) {
    +            executor.execute(new ForkingResponseHandlerRunnable(handler, transportException) {
                     @Override
                     protected void doRun() {
                         doHandleException(handler, transportException);
    diff --git a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java
    index 17df2d99052aa..71a3eb4996728 100644
    --- a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java
    +++ b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java
    @@ -136,6 +136,7 @@ void sendResponse(
                 isHandshake,
                 compressionScheme
             );
    +        response.mustIncRef();
             sendMessage(channel, message, responseStatsConsumer, () -> {
                 try {
                     messageListener.onResponseSent(requestId, action, response);
    diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java
    index d85cc1d67a8b9..66d9032d2666e 100644
    --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java
    +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java
    @@ -10,17 +10,13 @@
     import org.elasticsearch.action.ActionListener;
     import org.elasticsearch.action.ActionListenerResponseHandler;
     import org.elasticsearch.action.ActionRequest;
    -import org.elasticsearch.action.ActionResponse;
    -import org.elasticsearch.action.ActionType;
    -import org.elasticsearch.client.internal.Client;
    -import org.elasticsearch.client.internal.support.AbstractClient;
    +import org.elasticsearch.action.RemoteClusterActionType;
    +import org.elasticsearch.client.internal.RemoteClusterClient;
     import org.elasticsearch.cluster.node.DiscoveryNode;
    -import org.elasticsearch.common.settings.Settings;
    -import org.elasticsearch.threadpool.ThreadPool;
     
     import java.util.concurrent.Executor;
     
    -final class RemoteClusterAwareClient extends AbstractClient {
    +final class RemoteClusterAwareClient implements RemoteClusterClient {
     
         private final TransportService service;
         private final String clusterAlias;
    @@ -28,15 +24,7 @@ final class RemoteClusterAwareClient extends AbstractClient {
         private final Executor responseExecutor;
         private final boolean ensureConnected;
     
    -    RemoteClusterAwareClient(
    -        Settings settings,
    -        ThreadPool threadPool,
    -        TransportService service,
    -        String clusterAlias,
    -        Executor responseExecutor,
    -        boolean ensureConnected
    -    ) {
    -        super(settings, threadPool);
    +    RemoteClusterAwareClient(TransportService service, String clusterAlias, Executor responseExecutor, boolean ensureConnected) {
             this.service = service;
             this.clusterAlias = clusterAlias;
             this.remoteClusterService = service.getRemoteClusterService();
    @@ -45,8 +33,8 @@ final class RemoteClusterAwareClient extends AbstractClient {
         }
     
         @Override
    -    protected  void doExecute(
    -        ActionType action,
    +    public  void execute(
    +        RemoteClusterActionType action,
             Request request,
             ActionListener listener
         ) {
    @@ -78,14 +66,9 @@ protected  void
     
         private void maybeEnsureConnected(ActionListener ensureConnectedListener) {
             if (ensureConnected) {
    -            remoteClusterService.ensureConnected(clusterAlias, ensureConnectedListener);
    +            ActionListener.run(ensureConnectedListener, l -> remoteClusterService.ensureConnected(clusterAlias, l));
             } else {
                 ensureConnectedListener.onResponse(null);
             }
         }
    -
    -    @Override
    -    public Client getRemoteClusterClient(String remoteClusterAlias, Executor responseExecutor) {
    -        return remoteClusterService.getRemoteClusterClient(threadPool(), remoteClusterAlias, responseExecutor);
    -    }
     }
    diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java
    index 58e84f5e4ef11..7a6df38addba8 100644
    --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java
    +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java
    @@ -13,33 +13,41 @@
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.settings.SecureString;
     import org.elasticsearch.common.settings.Settings;
    +import org.elasticsearch.common.util.set.Sets;
     import org.elasticsearch.core.Nullable;
     
    +import java.util.Collections;
     import java.util.Map;
    +import java.util.Set;
     
     import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS;
     
     public class RemoteClusterCredentialsManager {
    -
         private static final Logger logger = LogManager.getLogger(RemoteClusterCredentialsManager.class);
     
    -    private volatile Map clusterCredentials;
    +    private volatile Map clusterCredentials = Collections.emptyMap();
     
         @SuppressWarnings("this-escape")
         public RemoteClusterCredentialsManager(Settings settings) {
             updateClusterCredentials(settings);
         }
     
    -    public final void updateClusterCredentials(Settings settings) {
    -        clusterCredentials = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings);
    -        logger.debug(
    -            () -> Strings.format(
    -                "Updated remote cluster credentials for clusters: [%s]",
    -                Strings.collectionToCommaDelimitedString(clusterCredentials.keySet())
    -            )
    -        );
    +    public final synchronized UpdateRemoteClusterCredentialsResult updateClusterCredentials(Settings settings) {
    +        final Map newClusterCredentials = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings);
    +        if (clusterCredentials.isEmpty()) {
    +            setClusterCredentialsAndLog(newClusterCredentials);
    +            return new UpdateRemoteClusterCredentialsResult(Set.copyOf(newClusterCredentials.keySet()), Collections.emptySet());
    +        }
    +
    +        final Set addedClusterAliases = Sets.difference(newClusterCredentials.keySet(), clusterCredentials.keySet());
    +        final Set removedClusterAliases = Sets.difference(clusterCredentials.keySet(), newClusterCredentials.keySet());
    +        setClusterCredentialsAndLog(newClusterCredentials);
    +        assert Sets.haveEmptyIntersection(removedClusterAliases, addedClusterAliases);
    +        return new UpdateRemoteClusterCredentialsResult(addedClusterAliases, removedClusterAliases);
         }
     
    +    public record UpdateRemoteClusterCredentialsResult(Set addedClusterAliases, Set removedClusterAliases) {}
    +
         @Nullable
         public SecureString resolveCredentials(String clusterAlias) {
             return clusterCredentials.get(clusterAlias);
    @@ -49,5 +57,15 @@ public boolean hasCredentials(String clusterAlias) {
             return clusterCredentials.containsKey(clusterAlias);
         }
     
    +    private void setClusterCredentialsAndLog(Map newClusterCredentials) {
    +        clusterCredentials = newClusterCredentials;
    +        logger.debug(
    +            () -> Strings.format(
    +                "Updated remote cluster credentials for clusters: [%s]",
    +                Strings.collectionToCommaDelimitedString(clusterCredentials.keySet())
    +            )
    +        );
    +    }
    +
         public static final RemoteClusterCredentialsManager EMPTY = new RemoteClusterCredentialsManager(Settings.EMPTY);
     }
    diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java
    index fd5c39ec5fb1f..9e68557c05de6 100644
    --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java
    +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java
    @@ -39,7 +39,7 @@
      */
     public class RemoteClusterPortSettings {
     
    -    public static final TransportVersion TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY = TransportVersions.V_8_500_061;
    +    public static final TransportVersion TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY = TransportVersions.V_8_10_X;
     
         public static final String REMOTE_CLUSTER_PROFILE = "_remote_cluster";
         public static final String REMOTE_CLUSTER_PREFIX = "remote_cluster.";
    diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
    index 6bfbb95cbcfe9..ce9ca88c6158d 100644
    --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
    +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
    @@ -16,7 +16,8 @@
     import org.elasticsearch.action.support.CountDownActionListener;
     import org.elasticsearch.action.support.IndicesOptions;
     import org.elasticsearch.action.support.PlainActionFuture;
    -import org.elasticsearch.client.internal.Client;
    +import org.elasticsearch.action.support.RefCountingRunnable;
    +import org.elasticsearch.client.internal.RemoteClusterClient;
     import org.elasticsearch.cluster.node.DiscoveryNode;
     import org.elasticsearch.cluster.node.DiscoveryNodeRole;
     import org.elasticsearch.common.Strings;
    @@ -31,7 +32,7 @@
     import org.elasticsearch.core.IOUtils;
     import org.elasticsearch.core.TimeValue;
     import org.elasticsearch.node.ReportingService;
    -import org.elasticsearch.threadpool.ThreadPool;
    +import org.elasticsearch.transport.RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult;
     
     import java.io.Closeable;
     import java.io.IOException;
    @@ -48,6 +49,7 @@
     import java.util.concurrent.TimeoutException;
     import java.util.function.BiFunction;
     import java.util.function.Function;
    +import java.util.function.Supplier;
     import java.util.stream.Stream;
     
     import static org.elasticsearch.common.settings.Setting.boolSetting;
    @@ -304,12 +306,53 @@ private synchronized void updateSkipUnavailable(String clusterAlias, Boolean ski
             }
         }
     
    -    public void updateRemoteClusterCredentials(Settings settings) {
    -        remoteClusterCredentialsManager.updateClusterCredentials(settings);
    +    public synchronized void updateRemoteClusterCredentials(Supplier settingsSupplier, ActionListener listener) {
    +        final Settings settings = settingsSupplier.get();
    +        final UpdateRemoteClusterCredentialsResult result = remoteClusterCredentialsManager.updateClusterCredentials(settings);
    +        // We only need to rebuild connections when a credential was newly added or removed for a cluster alias, not if the credential
    +        // value was updated. Therefore, only consider added or removed aliases
    +        final int totalConnectionsToRebuild = result.addedClusterAliases().size() + result.removedClusterAliases().size();
    +        if (totalConnectionsToRebuild == 0) {
    +            logger.debug("no connection rebuilding required after credentials update");
    +            listener.onResponse(null);
    +            return;
    +        }
    +        logger.info("rebuilding [{}] connections after credentials update", totalConnectionsToRebuild);
    +        try (var connectionRefs = new RefCountingRunnable(() -> listener.onResponse(null))) {
    +            for (var clusterAlias : result.addedClusterAliases()) {
    +                maybeRebuildConnectionOnCredentialsChange(clusterAlias, settings, connectionRefs);
    +            }
    +            for (var clusterAlias : result.removedClusterAliases()) {
    +                maybeRebuildConnectionOnCredentialsChange(clusterAlias, settings, connectionRefs);
    +            }
    +        }
         }
     
    -    public RemoteClusterCredentialsManager getRemoteClusterCredentialsManager() {
    -        return remoteClusterCredentialsManager;
    +    // package-private for testing
    +
    +    private void maybeRebuildConnectionOnCredentialsChange(String clusterAlias, Settings settings, RefCountingRunnable connectionRefs) {
    +        if (false == remoteClusters.containsKey(clusterAlias)) {
    +            // A credential was added or removed before a remote connection was configured.
    +            // Without an existing connection, there is nothing to rebuild.
    +            logger.info("no connection rebuild required for remote cluster [{}] after credentials change", clusterAlias);
    +            return;
    +        }
    +
    +        updateRemoteCluster(clusterAlias, settings, true, ActionListener.releaseAfter(new ActionListener<>() {
    +            @Override
    +            public void onResponse(RemoteClusterConnectionStatus status) {
    +                logger.info("remote cluster connection [{}] updated after credentials change: [{}]", clusterAlias, status);
    +            }
    +
    +            @Override
    +            public void onFailure(Exception e) {
    +                // We don't want to return an error to the upstream listener here since a connection rebuild failure
    +                // does *not* imply a failure to reload secure settings; however, that's how it would surface in the reload-settings call.
    +                // Instead, we log a warning which is also consistent with how we handle remote cluster settings updates (logging instead of
    +                // returning an error)
    +                logger.warn(() -> "failed to update remote cluster connection [" + clusterAlias + "] after credentials change", e);
    +            }
    +        }, connectionRefs.acquire()));
         }
     
         @Override
    @@ -346,9 +389,14 @@ public void onFailure(Exception e) {
          * @param newSettings the updated settings for the remote connection
          * @param listener a listener invoked once every configured cluster has been connected to
          */
    -    synchronized void updateRemoteCluster(
    +    void updateRemoteCluster(String clusterAlias, Settings newSettings, ActionListener listener) {
    +        updateRemoteCluster(clusterAlias, newSettings, false, listener);
    +    }
    +
    +    private synchronized void updateRemoteCluster(
             String clusterAlias,
             Settings newSettings,
    +        boolean forceRebuild,
             ActionListener listener
         ) {
             if (LOCAL_CLUSTER_GROUP_KEY.equals(clusterAlias)) {
    @@ -373,7 +421,7 @@ synchronized void updateRemoteCluster(
                 remote = new RemoteClusterConnection(finalSettings, clusterAlias, transportService, remoteClusterCredentialsManager);
                 remoteClusters.put(clusterAlias, remote);
                 remote.ensureConnected(listener.map(ignored -> RemoteClusterConnectionStatus.CONNECTED));
    -        } else if (remote.shouldRebuildConnection(newSettings)) {
    +        } else if (forceRebuild || remote.shouldRebuildConnection(newSettings)) {
                 // Changes to connection configuration. Must tear down existing connection
                 try {
                     IOUtils.close(remote);
    @@ -495,13 +543,12 @@ public void onFailure(Exception e) {
         /**
          * Returns a client to the remote cluster if the given cluster alias exists.
          *
    -     * @param threadPool       the {@link ThreadPool} for the client
          * @param clusterAlias     the cluster alias the remote cluster is registered under
          * @param responseExecutor the executor to use to process the response
          * @param ensureConnected  whether requests should wait for a connection attempt when there isn't a connection available
          * @throws IllegalArgumentException if the given clusterAlias doesn't exist
          */
    -    public Client getRemoteClusterClient(ThreadPool threadPool, String clusterAlias, Executor responseExecutor, boolean ensureConnected) {
    +    public RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor, boolean ensureConnected) {
             if (transportService.getRemoteClusterService().isEnabled() == false) {
                 throw new IllegalArgumentException(
                     "this node does not have the " + DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE.roleName() + " role"
    @@ -510,20 +557,18 @@ public Client getRemoteClusterClient(ThreadPool threadPool, String clusterAlias,
             if (transportService.getRemoteClusterService().getRemoteClusterNames().contains(clusterAlias) == false) {
                 throw new NoSuchRemoteClusterException(clusterAlias);
             }
    -        return new RemoteClusterAwareClient(settings, threadPool, transportService, clusterAlias, responseExecutor, ensureConnected);
    +        return new RemoteClusterAwareClient(transportService, clusterAlias, responseExecutor, ensureConnected);
         }
     
         /**
          * Returns a client to the remote cluster if the given cluster alias exists.
          *
    -     * @param threadPool       the {@link ThreadPool} for the client
          * @param clusterAlias     the cluster alias the remote cluster is registered under
          * @param responseExecutor the executor to use to process the response
          * @throws IllegalArgumentException if the given clusterAlias doesn't exist
          */
    -    public Client getRemoteClusterClient(ThreadPool threadPool, String clusterAlias, Executor responseExecutor) {
    +    public RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
             return getRemoteClusterClient(
    -            threadPool,
                 clusterAlias,
                 responseExecutor,
                 transportService.getRemoteClusterService().isSkipUnavailable(clusterAlias) == false
    diff --git a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java
    index 0f68a58faf463..75903b5bf72ab 100644
    --- a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java
    +++ b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java
    @@ -478,7 +478,7 @@ public void handleException(TransportException exp) {
             }
     
             @Override
    -        public Executor executor(ThreadPool threadPool) {
    +        public Executor executor() {
                 return managementExecutor;
             }
         }
    diff --git a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java
    index ecd4ec6e4fc1b..f8706dda458e7 100644
    --- a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java
    +++ b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java
    @@ -16,7 +16,6 @@
     import org.elasticsearch.tasks.CancellableTask;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.tasks.TaskId;
    -import org.elasticsearch.threadpool.ThreadPool;
     
     import java.io.IOException;
     import java.io.UncheckedIOException;
    @@ -58,14 +57,13 @@ public void messageReceived(T request, TransportChannel channel, Task task) thro
                 wrappedRequest.setParentTask(taskId);
                 service.sendRequest(targetNode, action, wrappedRequest, new TransportResponseHandler<>() {
                     @Override
    -                public Executor executor(ThreadPool threadPool) {
    +                public Executor executor() {
                         return TransportResponseHandler.TRANSPORT_WORKER;
                     }
     
                     @Override
                     public void handleResponse(TransportResponse response) {
                         try {
    -                        response.mustIncRef();
                             channel.sendResponse(response);
                         } catch (IOException e) {
                             throw new UncheckedIOException(e);
    diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java
    index 61b052c957ac1..d52a31c1e3f3c 100644
    --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java
    +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java
    @@ -227,7 +227,7 @@ public HandshakeResponse read(StreamInput in) throws IOException {
             }
     
             @Override
    -        public Executor executor(ThreadPool threadPool) {
    +        public Executor executor() {
                 return TransportResponseHandler.TRANSPORT_WORKER;
             }
     
    diff --git a/server/src/main/java/org/elasticsearch/transport/TransportResponse.java b/server/src/main/java/org/elasticsearch/transport/TransportResponse.java
    index 0c29b8171db77..9f4524252f515 100644
    --- a/server/src/main/java/org/elasticsearch/transport/TransportResponse.java
    +++ b/server/src/main/java/org/elasticsearch/transport/TransportResponse.java
    @@ -32,6 +32,8 @@ public TransportResponse(StreamInput in) throws IOException {
         public static class Empty extends TransportResponse {
             public static final Empty INSTANCE = new Empty();
     
    +        private Empty() {/* singleton */}
    +
             @Override
             public String toString() {
                 return "Empty{}";
    diff --git a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java
    index 9ac090fc00b03..c49a567b198e7 100644
    --- a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java
    +++ b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java
    @@ -12,7 +12,6 @@
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.Writeable;
     import org.elasticsearch.common.util.concurrent.EsExecutors;
    -import org.elasticsearch.threadpool.ThreadPool;
     
     import java.util.concurrent.Executor;
     
    @@ -24,7 +23,7 @@ public interface TransportResponseHandler extends W
          * performance-critical actions, and even then only if the deserialization and handling work is very cheap, because this executor will
          * perform all the work for responses from remote nodes on the receiving transport worker itself.
          */
    -    Executor executor(ThreadPool threadPool);
    +    Executor executor();
     
         void handleResponse(T response);
     
    @@ -55,7 +54,7 @@ public void handleResponse() {
                 }
     
                 @Override
    -            public Executor executor(ThreadPool threadPool) {
    +            public Executor executor() {
                     return executor;
                 }
     
    diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java
    index 3c1907c2115e8..9d850098a8a59 100644
    --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java
    +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java
    @@ -31,6 +31,7 @@
     import org.elasticsearch.common.transport.TransportAddress;
     import org.elasticsearch.common.util.concurrent.AbstractRunnable;
     import org.elasticsearch.common.util.concurrent.EsExecutors;
    +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
     import org.elasticsearch.common.util.concurrent.ThreadContext;
     import org.elasticsearch.core.AbstractRefCounted;
     import org.elasticsearch.core.Booleans;
    @@ -382,11 +383,11 @@ protected void doStop() {
                             holderToNotify.action(),
                             new NodeClosedException(localNode)
                         );
    -                    final var executor = handler.executor(threadPool);
    +                    final var executor = handler.executor();
                         if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
                             handler.handleException(exception);
                         } else {
    -                        executor.execute(new ForkingResponseHandlerRunnable(handler, exception, threadPool) {
    +                        executor.execute(new ForkingResponseHandlerRunnable(handler, exception) {
                                 @Override
                                 protected void doRun() {
                                     handler.handleException(exception);
    @@ -963,43 +964,82 @@ protected void handleInternalSendException(
             Exception failure
         ) {
             final Transport.ResponseContext contextToNotify = responseHandlers.remove(requestId);
    -        // If holderToNotify == null then handler has already been taken care of.
             if (contextToNotify == null) {
    +            // handler has already been completed somehow, nothing to do here
                 logger.debug("Exception while sending request, handler likely already notified due to timeout", failure);
                 return;
             }
             if (timeoutHandler != null) {
                 timeoutHandler.cancel();
             }
    -        // callback that an exception happened, but on a different thread since we don't
    -        // want handlers to worry about stack overflows. In the special case of running into a closing node we run on the current
    -        // thread on a best effort basis though.
    -        final SendRequestTransportException sendRequestException = new SendRequestTransportException(node, action, failure);
    -        final String executor = lifecycle.stoppedOrClosed() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC;
    -        threadPool.executor(executor).execute(new AbstractRunnable() {
    +        final var sendRequestException = new SendRequestTransportException(node, action, failure);
    +        final var handler = contextToNotify.handler();
    +        final var executor = getInternalSendExceptionExecutor(handler.executor());
    +        executor.execute(new AbstractRunnable() {
                 @Override
    -            public void onRejection(Exception e) {
    -                // if we get rejected during node shutdown we don't wanna bubble it up
    -                logger.debug(() -> format("failed to notify response handler on rejection, action: %s", contextToNotify.action()), e);
    +            protected void doRun() {
    +                if (tracerLog.isTraceEnabled() && shouldTraceAction(action)) {
    +                    tracerLog.trace("[{}][{}] failed to send request to node [{}]", requestId, action, node);
    +                }
    +                try {
    +                    handler.handleException(sendRequestException);
    +                } catch (Exception e) {
    +                    assert false : e;
    +                    if (e != sendRequestException) {
    +                        e.addSuppressed(sendRequestException);
    +                    }
    +                    logger.error(
    +                        Strings.format(
    +                            "[%d][%s] failed to notify handler [%s] of failure to send request to node [%s]",
    +                            requestId,
    +                            action,
    +                            handler,
    +                            node
    +                        ),
    +                        e
    +                    );
    +                    // indicates a bug in the handler but there's not much else we can do about it now, just carry on
    +                }
                 }
     
                 @Override
                 public void onFailure(Exception e) {
    -                logger.warn(() -> format("failed to notify response handler on exception, action: %s", contextToNotify.action()), e);
    +                assert false : e;
    +                logger.error(() -> format("failed to notify response handler on exception, action: %s", contextToNotify.action()), e);
                 }
     
                 @Override
    -            protected void doRun() {
    -                if (tracerLog.isTraceEnabled() && shouldTraceAction(action)) {
    -                    tracerLog.trace("[{}][{}] failed to send request to node [{}]", requestId, action, node);
    +            public boolean isForceExecution() {
    +                return true; // must complete every waiting listener
    +            }
    +
    +            @Override
    +            public void onRejection(Exception e) {
    +                if (e != sendRequestException) {
    +                    sendRequestException.addSuppressed(e);
                     }
    -                contextToNotify.handler().handleException(sendRequestException);
    +                // force-execution means we won't be rejected unless we're shutting down
    +                assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e;
    +                // in this case it's better to complete the handler on the calling thread rather than leaking it
    +                doRun();
                 }
             });
         }
     
    +    private Executor getInternalSendExceptionExecutor(Executor handlerExecutor) {
    +        if (lifecycle.stoppedOrClosed()) {
    +            // too late to try and dispatch anywhere else, let's just use the calling thread
    +            return EsExecutors.DIRECT_EXECUTOR_SERVICE;
    +        } else if (handlerExecutor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
    +            // if the handler is non-forking then dispatch to GENERIC to avoid a possible stack overflow
    +            return threadPool.generic();
    +        } else {
    +            return handlerExecutor;
    +        }
    +    }
    +
         private void sendLocalRequest(long requestId, final String action, final TransportRequest request, TransportRequestOptions options) {
    -        final DirectResponseChannel channel = new DirectResponseChannel(localNode, action, requestId, this, threadPool);
    +        final DirectResponseChannel channel = new DirectResponseChannel(localNode, action, requestId, this);
             try {
                 onRequestSent(localNode, requestId, action, request, options);
                 onRequestReceived(requestId, action);
    @@ -1437,8 +1477,8 @@ public void handleException(TransportException exp) {
             }
     
             @Override
    -        public Executor executor(ThreadPool threadPool) {
    -            return delegate.executor(threadPool);
    +        public Executor executor() {
    +            return delegate.executor();
             }
     
             @Override
    @@ -1465,14 +1505,12 @@ static class DirectResponseChannel implements TransportChannel {
             private final String action;
             private final long requestId;
             final TransportService service;
    -        final ThreadPool threadPool;
     
    -        DirectResponseChannel(DiscoveryNode localNode, String action, long requestId, TransportService service, ThreadPool threadPool) {
    +        DirectResponseChannel(DiscoveryNode localNode, String action, long requestId, TransportService service) {
                 this.localNode = localNode;
                 this.action = action;
                 this.requestId = requestId;
                 this.service = service;
    -            this.threadPool = threadPool;
             }
     
             @Override
    @@ -1482,43 +1520,39 @@ public String getProfileName() {
     
             @Override
             public void sendResponse(TransportResponse response) throws IOException {
    -            try {
    -                service.onResponseSent(requestId, action, response);
    -                try (var shutdownBlock = service.pendingDirectHandlers.withRef()) {
    -                    if (shutdownBlock == null) {
    -                        // already shutting down, the handler will be completed by sendRequestInternal or doStop
    -                        return;
    -                    }
    -                    final TransportResponseHandler handler = service.responseHandlers.onResponseReceived(requestId, service);
    -                    if (handler == null) {
    -                        // handler already completed, likely by a timeout which is logged elsewhere
    -                        return;
    -                    }
    -                    final var executor = handler.executor(threadPool);
    -                    if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
    -                        processResponse(handler, response);
    -                    } else {
    -                        response.mustIncRef();
    -                        executor.execute(new ForkingResponseHandlerRunnable(handler, null, threadPool) {
    -                            @Override
    -                            protected void doRun() {
    -                                processResponse(handler, response);
    -                            }
    +            service.onResponseSent(requestId, action, response);
    +            try (var shutdownBlock = service.pendingDirectHandlers.withRef()) {
    +                if (shutdownBlock == null) {
    +                    // already shutting down, the handler will be completed by sendRequestInternal or doStop
    +                    return;
    +                }
    +                final TransportResponseHandler handler = service.responseHandlers.onResponseReceived(requestId, service);
    +                if (handler == null) {
    +                    // handler already completed, likely by a timeout which is logged elsewhere
    +                    return;
    +                }
    +                final var executor = handler.executor();
    +                if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
    +                    processResponse(handler, response);
    +                } else {
    +                    response.mustIncRef();
    +                    executor.execute(new ForkingResponseHandlerRunnable(handler, null) {
    +                        @Override
    +                        protected void doRun() {
    +                            processResponse(handler, response);
    +                        }
     
    -                            @Override
    -                            public void onAfter() {
    -                                response.decRef();
    -                            }
    +                        @Override
    +                        public void onAfter() {
    +                            response.decRef();
    +                        }
     
    -                            @Override
    -                            public String toString() {
    -                                return "delivery of response to [" + requestId + "][" + action + "]: " + response;
    -                            }
    -                        });
    -                    }
    +                        @Override
    +                        public String toString() {
    +                            return "delivery of response to [" + requestId + "][" + action + "]: " + response;
    +                        }
    +                    });
                     }
    -            } finally {
    -                response.decRef();
                 }
             }
     
    @@ -1545,11 +1579,11 @@ public void sendResponse(Exception exception) throws IOException {
                         return;
                     }
                     final RemoteTransportException rtx = wrapInRemote(exception);
    -                final var executor = handler.executor(threadPool);
    +                final var executor = handler.executor();
                     if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
                         processException(handler, rtx);
                     } else {
    -                    executor.execute(new ForkingResponseHandlerRunnable(handler, rtx, threadPool) {
    +                    executor.execute(new ForkingResponseHandlerRunnable(handler, rtx) {
                             @Override
                             protected void doRun() {
                                 processException(handler, rtx);
    @@ -1718,8 +1752,8 @@ public void handleException(TransportException exp) {
             }
     
             @Override
    -        public Executor executor(ThreadPool threadPool) {
    -            return handler.executor(threadPool);
    +        public Executor executor() {
    +            return handler.executor();
             }
     
             @Override
    diff --git a/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java b/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java
    index 75ab5db982235..04a0b3434814a 100644
    --- a/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java
    +++ b/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java
    @@ -18,22 +18,17 @@
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
    -import org.elasticsearch.core.Tuple;
    -import org.elasticsearch.xcontent.ConstructingObjectParser;
     import org.elasticsearch.xcontent.ParseField;
     import org.elasticsearch.xcontent.ToXContent;
    -import org.elasticsearch.xcontent.XContentParser;
     
     import java.io.IOException;
     import java.util.Collections;
     import java.util.EnumSet;
     import java.util.HashMap;
     import java.util.Iterator;
    -import java.util.List;
     import java.util.Map;
     import java.util.Objects;
     import java.util.TreeMap;
    -import java.util.stream.Collectors;
     
     /**
      * Holds the results of the most recent attempt to migrate system indices. Updated by {@link SystemIndexMigrator} as it finishes each
    @@ -43,25 +38,7 @@ public class FeatureMigrationResults implements Metadata.Custom {
         public static final String TYPE = "system_index_migration";
         public static final TransportVersion MIGRATION_ADDED_VERSION = TransportVersions.V_8_0_0;
     
    -    private static final ParseField RESULTS_FIELD = new ParseField("results");
    -
    -    @SuppressWarnings("unchecked")
    -    public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(TYPE, a -> {
    -        final Map statuses = ((List>) a[0]).stream()
    -            .collect(Collectors.toMap(Tuple::v1, Tuple::v2));
    -        return new FeatureMigrationResults(statuses);
    -    });
    -
    -    static {
    -        PARSER.declareNamedObjects(
    -            ConstructingObjectParser.constructorArg(),
    -            (p, c, n) -> new Tuple<>(n, SingleFeatureMigrationResult.fromXContent(p)),
    -            v -> {
    -                throw new IllegalArgumentException("ordered " + RESULTS_FIELD.getPreferredName() + " are not supported");
    -            },
    -            RESULTS_FIELD
    -        );
    -    }
    +    static final ParseField RESULTS_FIELD = new ParseField("results");
     
         private final Map featureStatuses;
     
    @@ -83,10 +60,6 @@ public Iterator toXContentChunked(ToXContent.Params ignore
             return ChunkedToXContentHelper.xContentValuesMap(RESULTS_FIELD.getPreferredName(), featureStatuses);
         }
     
    -    public static FeatureMigrationResults fromXContent(XContentParser parser) {
    -        return PARSER.apply(parser, null);
    -    }
    -
         /**
          * Gets a map of feature name to that feature's status. Only contains features which have either been migrated successfully or
          * failed to migrate.
    diff --git a/server/src/main/java/org/elasticsearch/upgrades/SingleFeatureMigrationResult.java b/server/src/main/java/org/elasticsearch/upgrades/SingleFeatureMigrationResult.java
    index db1c325dfbb7f..24ed1943ed04e 100644
    --- a/server/src/main/java/org/elasticsearch/upgrades/SingleFeatureMigrationResult.java
    +++ b/server/src/main/java/org/elasticsearch/upgrades/SingleFeatureMigrationResult.java
    @@ -14,11 +14,9 @@
     import org.elasticsearch.common.io.stream.StreamOutput;
     import org.elasticsearch.common.io.stream.Writeable;
     import org.elasticsearch.core.Nullable;
    -import org.elasticsearch.xcontent.ConstructingObjectParser;
     import org.elasticsearch.xcontent.ParseField;
     import org.elasticsearch.xcontent.ToXContentObject;
     import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentParser;
     
     import java.io.IOException;
     import java.util.Objects;
    @@ -27,10 +25,9 @@
      * Holds the results of migrating a single feature. See also {@link FeatureMigrationResults}.
      */
     public class SingleFeatureMigrationResult implements SimpleDiffable, Writeable, ToXContentObject {
    -    private static final String NAME = "feature_migration_status";
    -    private static final ParseField SUCCESS_FIELD = new ParseField("successful");
    -    private static final ParseField FAILED_INDEX_NAME_FIELD = new ParseField("failed_index");
    -    private static final ParseField EXCEPTION_FIELD = new ParseField("exception");
    +    static final ParseField SUCCESS_FIELD = new ParseField("successful");
    +    static final ParseField FAILED_INDEX_NAME_FIELD = new ParseField("failed_index");
    +    static final ParseField EXCEPTION_FIELD = new ParseField("exception");
     
         private final boolean successful;
         @Nullable
    @@ -38,23 +35,7 @@ public class SingleFeatureMigrationResult implements SimpleDiffable PARSER = new ConstructingObjectParser<>(
    -        NAME,
    -        a -> new SingleFeatureMigrationResult((boolean) a[0], (String) a[1], (Exception) a[2])
    -    );
    -
    -    static {
    -        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), SUCCESS_FIELD);
    -        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FAILED_INDEX_NAME_FIELD);
    -        PARSER.declareObject(
    -            ConstructingObjectParser.optionalConstructorArg(),
    -            (p, c) -> ElasticsearchException.fromXContent(p),
    -            EXCEPTION_FIELD
    -        );
    -    }
    -
    -    private SingleFeatureMigrationResult(boolean successful, String failedIndexName, Exception exception) {
    +    SingleFeatureMigrationResult(boolean successful, String failedIndexName, Exception exception) {
             this.successful = successful;
             if (successful == false) {
                 Objects.requireNonNull(failedIndexName, "failed index name must be present for failed feature migration statuses");
    @@ -75,10 +56,6 @@ private SingleFeatureMigrationResult(boolean successful, String failedIndexName,
             }
         }
     
    -    public static SingleFeatureMigrationResult fromXContent(XContentParser parser) {
    -        return PARSER.apply(parser, null);
    -    }
    -
         /**
          * Creates a record indicating that migration succeeded.
          */
    diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv
    new file mode 100644
    index 0000000000000..ad2c89d18b70a
    --- /dev/null
    +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv
    @@ -0,0 +1,111 @@
    +7.0.0,7000099
    +7.0.1,7000199
    +7.1.0,7010099
    +7.1.1,7010199
    +7.2.0,7020099
    +7.2.1,7020199
    +7.3.0,7030099
    +7.3.1,7030199
    +7.3.2,7030299
    +7.4.0,7040099
    +7.4.1,7040199
    +7.4.2,7040299
    +7.5.0,7050099
    +7.5.1,7050199
    +7.5.2,7050299
    +7.6.0,7060099
    +7.6.1,7060199
    +7.6.2,7060299
    +7.7.0,7070099
    +7.7.1,7070199
    +7.8.0,7080099
    +7.8.1,7080199
    +7.9.0,7090099
    +7.9.1,7090199
    +7.9.2,7090299
    +7.9.3,7090399
    +7.10.0,7100099
    +7.10.1,7100199
    +7.10.2,7100299
    +7.11.0,7110099
    +7.11.1,7110199
    +7.11.2,7110299
    +7.12.0,7120099
    +7.12.1,7120199
    +7.13.0,7130099
    +7.13.1,7130199
    +7.13.2,7130299
    +7.13.3,7130399
    +7.13.4,7130499
    +7.14.0,7140099
    +7.14.1,7140199
    +7.14.2,7140299
    +7.15.0,7150099
    +7.15.1,7150199
    +7.15.2,7150299
    +7.16.0,7160099
    +7.16.1,7160199
    +7.16.2,7160299
    +7.16.3,7160399
    +7.17.0,7170099
    +7.17.1,7170199
    +7.17.2,7170299
    +7.17.3,7170399
    +7.17.4,7170499
    +7.17.5,7170599
    +7.17.6,7170699
    +7.17.7,7170799
    +7.17.8,7170899
    +7.17.9,7170999
    +7.17.10,7171099
    +7.17.11,7171199
    +7.17.12,7171299
    +7.17.13,7171399
    +7.17.14,7171499
    +7.17.15,7171599
    +7.17.16,7171699
    +7.17.17,7171799
    +8.0.0,8000099
    +8.0.1,8000199
    +8.1.0,8010099
    +8.1.1,8010199
    +8.1.2,8010299
    +8.1.3,8010399
    +8.2.0,8020099
    +8.2.1,8020199
    +8.2.2,8020299
    +8.2.3,8020399
    +8.3.0,8030099
    +8.3.1,8030199
    +8.3.2,8030299
    +8.3.3,8030399
    +8.4.0,8040099
    +8.4.1,8040199
    +8.4.2,8040299
    +8.4.3,8040399
    +8.5.0,8050099
    +8.5.1,8050199
    +8.5.2,8050299
    +8.5.3,8050399
    +8.6.0,8060099
    +8.6.1,8060199
    +8.6.2,8060299
    +8.7.0,8070099
    +8.7.1,8070199
    +8.8.0,8080099
    +8.8.1,8080199
    +8.8.2,8080299
    +8.9.0,8500020
    +8.9.1,8500020
    +8.9.2,8500020
    +8.10.0,8500061
    +8.10.1,8500061
    +8.10.2,8500061
    +8.10.3,8500061
    +8.10.4,8500061
    +8.11.0,8512001
    +8.11.1,8512001
    +8.11.2,8512001
    +8.11.3,8512001
    +8.11.4,8512001
    +8.12.0,8560000
    diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json
    index d8b4ed1ff93c9..46e32300e70fd 100644
    --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json
    +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json
    @@ -29,5 +29,6 @@
       "BOOTSTRAP_CHECK_ROLE_MAPPINGS": "bootstrap-checks-xpack.html#_role_mappings_check",
       "BOOTSTRAP_CHECK_TLS": "bootstrap-checks-xpack.html#bootstrap-checks-tls",
       "BOOTSTRAP_CHECK_TOKEN_SSL": "bootstrap-checks-xpack.html#_token_ssl_check",
    -  "BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP": "security-minimal-setup.html"
    +  "BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP": "security-minimal-setup.html",
    +  "CONTACT_SUPPORT": "troubleshooting.html#troubleshooting-contact-support"
     }
    diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv
    new file mode 100644
    index 0000000000000..644cc362d3d4c
    --- /dev/null
    +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv
    @@ -0,0 +1,111 @@
    +7.0.0,7000099
    +7.0.1,7000199
    +7.1.0,7010099
    +7.1.1,7010199
    +7.2.0,7020099
    +7.2.1,7020199
    +7.3.0,7030099
    +7.3.1,7030199
    +7.3.2,7030299
    +7.4.0,7040099
    +7.4.1,7040199
    +7.4.2,7040299
    +7.5.0,7050099
    +7.5.1,7050199
    +7.5.2,7050299
    +7.6.0,7060099
    +7.6.1,7060199
    +7.6.2,7060299
    +7.7.0,7070099
    +7.7.1,7070199
    +7.8.0,7080099
    +7.8.1,7080199
    +7.9.0,7090099
    +7.9.1,7090199
    +7.9.2,7090299
    +7.9.3,7090399
    +7.10.0,7100099
    +7.10.1,7100199
    +7.10.2,7100299
    +7.11.0,7110099
    +7.11.1,7110199
    +7.11.2,7110299
    +7.12.0,7120099
    +7.12.1,7120199
    +7.13.0,7130099
    +7.13.1,7130199
    +7.13.2,7130299
    +7.13.3,7130399
    +7.13.4,7130499
    +7.14.0,7140099
    +7.14.1,7140199
    +7.14.2,7140299
    +7.15.0,7150099
    +7.15.1,7150199
    +7.15.2,7150299
    +7.16.0,7160099
    +7.16.1,7160199
    +7.16.2,7160299
    +7.16.3,7160399
    +7.17.0,7170099
    +7.17.1,7170199
    +7.17.2,7170299
    +7.17.3,7170399
    +7.17.4,7170499
    +7.17.5,7170599
    +7.17.6,7170699
    +7.17.7,7170799
    +7.17.8,7170899
    +7.17.9,7170999
    +7.17.10,7171099
    +7.17.11,7171199
    +7.17.12,7171299
    +7.17.13,7171399
    +7.17.14,7171499
    +7.17.15,7171599
    +7.17.16,7171699
    +7.17.17,7171799
    +8.0.0,8000099
    +8.0.1,8000199
    +8.1.0,8010099
    +8.1.1,8010199
    +8.1.2,8010299
    +8.1.3,8010399
    +8.2.0,8020099
    +8.2.1,8020199
    +8.2.2,8020299
    +8.2.3,8020399
    +8.3.0,8030099
    +8.3.1,8030199
    +8.3.2,8030299
    +8.3.3,8030399
    +8.4.0,8040099
    +8.4.1,8040199
    +8.4.2,8040299
    +8.4.3,8040399
    +8.5.0,8050099
    +8.5.1,8050199
    +8.5.2,8050299
    +8.5.3,8050399
    +8.6.0,8060099
    +8.6.1,8060199
    +8.6.2,8060299
    +8.7.0,8070099
    +8.7.1,8070199
    +8.8.0,8080099
    +8.8.1,8080199
    +8.8.2,8080299
    +8.9.0,8090099
    +8.9.1,8090199
    +8.9.2,8090299
    +8.10.0,8100099
    +8.10.1,8100199
    +8.10.2,8100299
    +8.10.3,8100399
    +8.10.4,8100499
    +8.11.0,8500003
    +8.11.1,8500003
    +8.11.2,8500003
    +8.11.3,8500003
    +8.11.4,8500003
    +8.12.0,8500008
    diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
    index 6400eef6f9e34..2268c95312716 100644
    --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
    +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
    @@ -43,6 +43,7 @@
     import org.elasticsearch.search.internal.ShardSearchContextId;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.transport.RemoteTransportException;
    +import org.elasticsearch.xcontent.ObjectPath;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContent;
     import org.elasticsearch.xcontent.XContentBuilder;
    @@ -67,8 +68,11 @@
     import static org.hamcrest.CoreMatchers.hasItem;
     import static org.hamcrest.CoreMatchers.hasItems;
     import static org.hamcrest.Matchers.equalTo;
    +import static org.hamcrest.Matchers.hasEntry;
     import static org.hamcrest.Matchers.hasSize;
     import static org.hamcrest.Matchers.instanceOf;
    +import static org.hamcrest.Matchers.not;
    +import static org.hamcrest.Matchers.nullValue;
     import static org.hamcrest.Matchers.startsWith;
     
     public class ElasticsearchExceptionTests extends ESTestCase {
    @@ -724,6 +728,48 @@ public void testToXContentWithHeadersAndMetadata() throws IOException {
             );
         }
     
    +    @SuppressWarnings("unchecked")
    +    public void testToXContentWithObjectCycles() throws Exception {
    +        ElasticsearchException root = new ElasticsearchException("root exception");
    +
    +        ElasticsearchException suppressed1 = new ElasticsearchException("suppressed#1", root);
    +
    +        ElasticsearchException suppressed2 = new ElasticsearchException("suppressed#2");
    +        ElasticsearchException suppressed3 = new ElasticsearchException("suppressed#3");
    +        suppressed3.addSuppressed(suppressed2);
    +        suppressed2.addSuppressed(suppressed3);
    +
    +        root.addSuppressed(suppressed1);
    +        root.addSuppressed(suppressed2);
    +        root.addSuppressed(suppressed3);
    +
    +        // Because we support up to 100 nested exceptions, this JSON ends up very long.
    +        // Rather than assert the full content, we check that
    +        // (a) it generated successfully (no StackOverflowErrors)
    +        BytesReference xContent = XContentHelper.toXContent(root, XContentType.JSON, randomBoolean());
    +        // (b) it's valid JSON
    +        final Map map = XContentHelper.convertToMap(xContent, false, XContentType.JSON).v2();
    +        // (c) it contains the right content
    +        assertThat(ObjectPath.eval("type", map), equalTo("exception"));
    +        assertThat(ObjectPath.eval("reason", map), equalTo("root exception"));
    +        assertThat(ObjectPath.eval("suppressed.0.reason", map), equalTo("suppressed#1"));
    +        assertThat(ObjectPath.eval("suppressed.0.caused_by.reason", map), equalTo("root exception"));
    +        assertThat(ObjectPath.eval("suppressed.0.caused_by.suppressed.0.reason", map), equalTo("suppressed#1"));
    +        assertThat(ObjectPath.eval("suppressed.1.reason", map), equalTo("suppressed#2"));
    +        assertThat(ObjectPath.eval("suppressed.1.suppressed.0.reason", map), equalTo("suppressed#3"));
    +        assertThat(ObjectPath.eval("suppressed.1.suppressed.0.suppressed.0.reason", map), equalTo("suppressed#2"));
    +        assertThat(ObjectPath.eval("suppressed.2.reason", map), equalTo("suppressed#3"));
    +        assertThat(ObjectPath.eval("suppressed.2.suppressed.0.reason", map), equalTo("suppressed#2"));
    +        assertThat(ObjectPath.eval("suppressed.2.suppressed.0.suppressed.0.reason", map), equalTo("suppressed#3"));
    +
    +        String tailExceptionPath = ".suppressed.0.caused_by".repeat(50).substring(1) + ".suppressed.0";
    +        final Object tailException = ObjectPath.eval(tailExceptionPath, map);
    +        assertThat(tailException, not(nullValue()));
    +        assertThat(tailException, instanceOf(Map.class));
    +        assertThat((Map) tailException, hasEntry("reason", "too many nested exceptions"));
    +        assertThat((Map) tailException, hasEntry("type", "illegal_state_exception"));
    +    }
    +
         public void testFromXContent() throws IOException {
             final XContent xContent = randomFrom(XContentType.values()).xContent();
             XContentBuilder builder = XContentBuilder.builder(xContent)
    diff --git a/server/src/test/java/org/elasticsearch/ReleaseVersionsTests.java b/server/src/test/java/org/elasticsearch/ReleaseVersionsTests.java
    new file mode 100644
    index 0000000000000..4ed262da07407
    --- /dev/null
    +++ b/server/src/test/java/org/elasticsearch/ReleaseVersionsTests.java
    @@ -0,0 +1,35 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch;
    +
    +import org.elasticsearch.test.ESTestCase;
    +
    +import java.util.function.IntFunction;
    +
    +import static org.hamcrest.Matchers.equalTo;
    +
    +public class ReleaseVersionsTests extends ESTestCase {
    +
    +    public void testReleaseVersions() {
    +        IntFunction versions = ReleaseVersions.generateVersionsLookup(ReleaseVersionsTests.class);
    +
    +        assertThat(versions.apply(10), equalTo("8.0.0"));
    +        assertThat(versions.apply(14), equalTo("8.1.0-8.1.1"));
    +        assertThat(versions.apply(21), equalTo("8.2.0"));
    +        assertThat(versions.apply(22), equalTo("8.2.1"));
    +    }
    +
    +    public void testReturnsRange() {
    +        IntFunction versions = ReleaseVersions.generateVersionsLookup(ReleaseVersionsTests.class);
    +
    +        assertThat(versions.apply(17), equalTo("8.1.2-8.2.0"));
    +        expectThrows(AssertionError.class, () -> versions.apply(9));
    +        assertThat(versions.apply(24), equalTo("8.2.2-snapshot[24]"));
    +    }
    +}
    diff --git a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java
    index 6be145c6e9e33..3bdf5814878a7 100644
    --- a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java
    @@ -8,6 +8,7 @@
     package org.elasticsearch.action;
     
     import org.apache.lucene.store.AlreadyClosedException;
    +import org.elasticsearch.ElasticsearchException;
     import org.elasticsearch.action.support.PlainActionFuture;
     import org.elasticsearch.common.util.concurrent.AbstractRunnable;
     import org.elasticsearch.core.Assertions;
    @@ -503,6 +504,77 @@ private static void completeListener(boolean successResponse, ActionListener();
    +        final var successResult = new Object();
    +        ActionListener.run(successFuture, l -> l.onResponse(successResult));
    +        assertTrue(successFuture.isDone());
    +        assertSame(successResult, successFuture.get());
    +
    +        final var failFuture = new PlainActionFuture<>();
    +        final var failException = new ElasticsearchException("simulated");
    +        ActionListener.run(failFuture, l -> {
    +            if (randomBoolean()) {
    +                l.onFailure(failException);
    +            } else {
    +                throw failException;
    +            }
    +        });
    +        assertTrue(failFuture.isDone());
    +        assertSame(failException, expectThrows(ExecutionException.class, ElasticsearchException.class, failFuture::get));
    +    }
    +
    +    public void testRunWithResource() {
    +        final var future = new PlainActionFuture<>();
    +        final var successResult = new Object();
    +        final var failException = new ElasticsearchException("simulated");
    +        final var resourceIsClosed = new AtomicBoolean(false);
    +        ActionListener.runWithResource(ActionListener.runBefore(future, () -> assertTrue(resourceIsClosed.get())), () -> new Releasable() {
    +            @Override
    +            public void close() {
    +                assertTrue(resourceIsClosed.compareAndSet(false, true));
    +            }
    +
    +            @Override
    +            public String toString() {
    +                return "test releasable";
    +            }
    +        }, (l, r) -> {
    +            assertFalse(resourceIsClosed.get());
    +            assertEquals("test releasable", r.toString());
    +            if (randomBoolean()) {
    +                l.onResponse(successResult);
    +            } else {
    +                if (randomBoolean()) {
    +                    l.onFailure(failException);
    +                } else {
    +                    throw failException;
    +                }
    +            }
    +        });
    +
    +        assertTrue(future.isDone());
    +        try {
    +            assertSame(successResult, future.get());
    +        } catch (ExecutionException e) {
    +            assertSame(failException, e.getCause());
    +        } catch (InterruptedException e) {
    +            fail(e);
    +        }
    +
    +        final var failureFuture = new PlainActionFuture<>();
    +        ActionListener.runWithResource(
    +            failureFuture,
    +            () -> { throw new ElasticsearchException("resource creation failure"); },
    +            (l, r) -> fail("should not be called")
    +        );
    +        assertTrue(failureFuture.isDone());
    +        assertEquals(
    +            "resource creation failure",
    +            expectThrows(ExecutionException.class, ElasticsearchException.class, failureFuture::get).getMessage()
    +        );
    +    }
    +
         public void testReleaseAfter() {
             runReleaseAfterTest(true, false);
             runReleaseAfterTest(true, true);
    diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
    index a076537bb7351..5dd85f9ee35d5 100644
    --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
    @@ -15,6 +15,7 @@
     import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
     import org.elasticsearch.cluster.node.DiscoveryNodes;
     import org.elasticsearch.cluster.service.ClusterService;
    +import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.settings.ClusterSettings;
     import org.elasticsearch.common.settings.IndexScopedSettings;
     import org.elasticsearch.common.settings.Settings;
    @@ -30,6 +31,7 @@
     import org.elasticsearch.rest.RestChannel;
     import org.elasticsearch.rest.RestController;
     import org.elasticsearch.rest.RestHandler;
    +import org.elasticsearch.rest.RestInterceptor;
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction;
     import org.elasticsearch.tasks.Task;
    @@ -45,7 +47,6 @@
     import java.util.Arrays;
     import java.util.List;
     import java.util.function.Supplier;
    -import java.util.function.UnaryOperator;
     
     import static java.util.Collections.emptyList;
     import static java.util.Collections.singletonList;
    @@ -91,7 +92,7 @@ protected FakeTransportAction(String actionName, ActionFilters actionFilters, Ta
                 @Override
                 protected void doExecute(Task task, FakeRequest request, ActionListener listener) {}
             }
    -        final var action = new ActionType<>("fake", null);
    +        final var action = new ActionType<>("fake");
             ActionPlugin registersFakeAction = new ActionPlugin() {
                 @Override
                 public List> getActions() {
    @@ -110,6 +111,7 @@ public void testSetupRestHandlerContainsKnownBuiltin() {
             ActionModule actionModule = new ActionModule(
                 settings.getSettings(),
                 TestIndexNameExpressionResolver.newInstance(),
    +            null,
                 settings.getIndexScopedSettings(),
                 settings.getClusterSettings(),
                 settings.getSettingsFilter(),
    @@ -147,6 +149,7 @@ public void testPluginCantOverwriteBuiltinRestHandler() throws IOException {
                 @Override
                 public List getRestHandlers(
                     Settings settings,
    +                NamedWriteableRegistry namedWriteableRegistry,
                     RestController restController,
                     ClusterSettings clusterSettings,
                     IndexScopedSettings indexScopedSettings,
    @@ -171,6 +174,7 @@ public String getName() {
                 ActionModule actionModule = new ActionModule(
                     settings.getSettings(),
                     TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
    +                null,
                     settings.getIndexScopedSettings(),
                     settings.getClusterSettings(),
                     settings.getSettingsFilter(),
    @@ -207,6 +211,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c
                 @Override
                 public List getRestHandlers(
                     Settings settings,
    +                NamedWriteableRegistry namedWriteableRegistry,
                     RestController restController,
                     ClusterSettings clusterSettings,
                     IndexScopedSettings indexScopedSettings,
    @@ -225,6 +230,7 @@ public List getRestHandlers(
                 ActionModule actionModule = new ActionModule(
                     settings.getSettings(),
                     TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
    +                null,
                     settings.getIndexScopedSettings(),
                     settings.getClusterSettings(),
                     settings.getSettingsFilter(),
    @@ -274,6 +280,7 @@ public void test3rdPartyHandlerIsNotInstalled() {
                     () -> new ActionModule(
                         settingsModule.getSettings(),
                         TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
    +                    null,
                         settingsModule.getIndexScopedSettings(),
                         settingsModule.getClusterSettings(),
                         settingsModule.getSettingsFilter(),
    @@ -314,6 +321,7 @@ public void test3rdPartyRestControllerIsNotInstalled() {
                     () -> new ActionModule(
                         settingsModule.getSettings(),
                         TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
    +                    null,
                         settingsModule.getIndexScopedSettings(),
                         settingsModule.getClusterSettings(),
                         settingsModule.getSettingsFilter(),
    @@ -362,9 +370,9 @@ class SecPlugin implements ActionPlugin, RestServerActionPlugin {
             }
     
             @Override
    -        public UnaryOperator getRestHandlerInterceptor(ThreadContext threadContext) {
    +        public RestInterceptor getRestHandlerInterceptor(ThreadContext threadContext) {
                 if (installInterceptor) {
    -                return UnaryOperator.identity();
    +                return (request, channel, targetHandler, listener) -> listener.onResponse(true);
                 } else {
                     return null;
                 }
    @@ -372,14 +380,14 @@ public UnaryOperator getRestHandlerInterceptor(ThreadContext thread
     
             @Override
             public RestController getRestController(
    -            UnaryOperator handlerWrapper,
    +            RestInterceptor interceptor,
                 NodeClient client,
                 CircuitBreakerService circuitBreakerService,
                 UsageService usageService,
                 Tracer tracer
             ) {
                 if (installController) {
    -                return new RestController(handlerWrapper, client, circuitBreakerService, usageService, tracer);
    +                return new RestController(interceptor, client, circuitBreakerService, usageService, tracer);
                 } else {
                     return null;
                 }
    diff --git a/server/src/test/java/org/elasticsearch/action/ActionTests.java b/server/src/test/java/org/elasticsearch/action/ActionTests.java
    index 4874972314ee6..dd8ab675652fc 100644
    --- a/server/src/test/java/org/elasticsearch/action/ActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/ActionTests.java
    @@ -13,9 +13,9 @@
     public class ActionTests extends ESTestCase {
     
         public void testEquals() {
    -        final var fakeAction1 = ActionType.localOnly("a");
    -        final var fakeAction2 = ActionType.localOnly("a");
    -        final var fakeAction3 = ActionType.localOnly("b");
    +        final var fakeAction1 = new ActionType<>("a");
    +        final var fakeAction2 = new ActionType<>("a");
    +        final var fakeAction3 = new ActionType<>("b");
             String s = "Some random other object";
             assertEquals(fakeAction1, fakeAction1);
             assertEquals(fakeAction2, fakeAction2);
    diff --git a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java
    index 0f0e4d91da990..ad44d74cf1778 100644
    --- a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java
    @@ -85,7 +85,7 @@ public void testToXContentDoesntIncludeForcedRefreshUnlessForced() throws IOExce
             ) {
                 // DocWriteResponse is abstract so we have to sneak a subclass in here to test it.
             };
    -        response.setShardInfo(new ShardInfo(1, 1));
    +        response.setShardInfo(ShardInfo.allSuccessful(1));
             response.setForcedRefresh(false);
             try (XContentBuilder builder = JsonXContent.contentBuilder()) {
                 response.toXContent(builder, ToXContent.EMPTY_PARAMS);
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java
    index a598e58ada75f..69cd9b4026108 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java
    @@ -190,8 +190,7 @@ public void testGetDesiredBalance() throws Exception {
                                 ShardRoutingState.STARTED
                             );
                             if (nodeIds.size() > 1) {
    -                            shard = TestShardRouting.relocate(
    -                                shard,
    +                            shard = shard.relocate(
                                     randomValueOtherThan(nodeId, () -> randomFrom(nodeIds)),
                                     ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE
                                 );
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java
    index 5f6540d46c719..02ec4dc508c0b 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java
    @@ -531,7 +531,7 @@ public ActionResponse.Empty read(StreamInput in) {
                 }
     
                 @Override
    -            public Executor executor(ThreadPool threadPool) {
    +            public Executor executor() {
                     return TransportResponseHandler.TRANSPORT_WORKER;
                 }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java
    index 17a22ff8e82fd..22aa5e9869afa 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java
    @@ -218,7 +218,7 @@ public ActionResponse.Empty read(StreamInput in) {
                 }
     
                 @Override
    -            public Executor executor(ThreadPool threadPool) {
    +            public Executor executor() {
                     return TransportResponseHandler.TRANSPORT_WORKER;
                 }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java
    index 38c811d367560..9c7fa266a0762 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java
    @@ -69,6 +69,7 @@ private ClusterFormationInfoAction.Response mutateResponse(ClusterFormationInfoA
                             clusterFormationState.lastCommittedConfiguration(),
                             clusterFormationState.resolvedAddresses(),
                             clusterFormationState.foundPeers(),
    +                        clusterFormationState.mastersOfPeers(),
                             clusterFormationState.currentTerm(),
                             clusterFormationState.hasDiscoveredQuorum(),
                             clusterFormationState.statusInfo(),
    @@ -88,6 +89,7 @@ private ClusterFormationInfoAction.Response mutateResponse(ClusterFormationInfoA
                             clusterFormationState.lastCommittedConfiguration(),
                             clusterFormationState.resolvedAddresses(),
                             clusterFormationState.foundPeers(),
    +                        clusterFormationState.mastersOfPeers(),
                             clusterFormationState.currentTerm(),
                             clusterFormationState.hasDiscoveredQuorum(),
                             clusterFormationState.statusInfo(),
    @@ -107,6 +109,7 @@ private ClusterFormationInfoAction.Response mutateResponse(ClusterFormationInfoA
                             clusterFormationState.lastCommittedConfiguration(),
                             clusterFormationState.resolvedAddresses(),
                             clusterFormationState.foundPeers(),
    +                        clusterFormationState.mastersOfPeers(),
                             clusterFormationState.currentTerm(),
                             clusterFormationState.hasDiscoveredQuorum() == false,
                             clusterFormationState.statusInfo(),
    @@ -148,6 +151,7 @@ private ClusterFormationFailureHelper.ClusterFormationState getClusterFormationS
                 new CoordinationMetadata.VotingConfiguration(Collections.emptySet()),
                 Collections.emptyList(),
                 Collections.emptyList(),
    +            Collections.emptySet(),
                 randomLong(),
                 randomBoolean(),
                 new StatusInfo(randomFrom(StatusInfo.Status.HEALTHY, StatusInfo.Status.UNHEALTHY), randomAlphaOfLength(20)),
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java
    index 00f46d8c42bf0..3d88e4e6d6a09 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java
    @@ -56,7 +56,6 @@ public void testWriteBlocks() {
                 threadPool,
                 mock(ActionFilters.class),
                 mock(IndexNameExpressionResolver.class),
    -            l -> {},
                 mock(AllocationService.class)
             );
     
    @@ -85,7 +84,6 @@ public void testNoBlocks() {
                 threadPool,
                 mock(ActionFilters.class),
                 mock(IndexNameExpressionResolver.class),
    -            l -> {},
                 mock(AllocationService.class)
             );
     
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequestTests.java
    index b9d2edb108683..df963829810e5 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequestTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequestTests.java
    @@ -8,7 +8,6 @@
     
     package org.elasticsearch.action.admin.cluster.desirednodes;
     
    -import org.elasticsearch.Version;
     import org.elasticsearch.action.ActionRequestValidationException;
     import org.elasticsearch.cluster.metadata.DesiredNode;
     import org.elasticsearch.common.settings.Settings;
    @@ -46,14 +45,13 @@ private DesiredNode hotDesiredNode() {
                 .build();
     
             if (randomBoolean()) {
    -            return new DesiredNode(settings, randomFloat(), ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +            return new DesiredNode(settings, randomFloat(), ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
             } else {
                 return new DesiredNode(
                     settings,
                     new DesiredNode.ProcessorsRange(1, randomBoolean() ? null : (double) 1),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 );
             }
         }
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
    index 0f4c01c674b1a..d4231c9f7538b 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
    @@ -23,23 +23,118 @@
     import org.elasticsearch.core.TimeValue;
     import org.elasticsearch.rest.RestStatus;
     import org.elasticsearch.test.AbstractXContentSerializingTestCase;
    +import org.elasticsearch.xcontent.ConstructingObjectParser;
    +import org.elasticsearch.xcontent.ObjectParser;
    +import org.elasticsearch.xcontent.ParseField;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentParser;
     import org.hamcrest.Matchers;
     
     import java.io.IOException;
     import java.util.Collections;
    +import java.util.List;
     import java.util.Locale;
     import java.util.Map;
     import java.util.function.Predicate;
     import java.util.regex.Pattern;
     
    +import static java.util.Collections.emptyMap;
    +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
    +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
     import static org.hamcrest.CoreMatchers.allOf;
     import static org.hamcrest.Matchers.greaterThanOrEqualTo;
     import static org.hamcrest.Matchers.is;
     import static org.hamcrest.Matchers.lessThanOrEqualTo;
     
     public class ClusterHealthResponsesTests extends AbstractXContentSerializingTestCase {
    +
    +    private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
    +        "cluster_health_response",
    +        true,
    +        parsedObjects -> {
    +            int i = 0;
    +            // ClusterStateHealth fields
    +            int numberOfNodes = (int) parsedObjects[i++];
    +            int numberOfDataNodes = (int) parsedObjects[i++];
    +            int activeShards = (int) parsedObjects[i++];
    +            int relocatingShards = (int) parsedObjects[i++];
    +            int activePrimaryShards = (int) parsedObjects[i++];
    +            int initializingShards = (int) parsedObjects[i++];
    +            int unassignedShards = (int) parsedObjects[i++];
    +            double activeShardsPercent = (double) parsedObjects[i++];
    +            String statusStr = (String) parsedObjects[i++];
    +            ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr);
    +            @SuppressWarnings("unchecked")
    +            List indexList = (List) parsedObjects[i++];
    +            final Map indices;
    +            if (indexList == null || indexList.isEmpty()) {
    +                indices = emptyMap();
    +            } else {
    +                indices = Maps.newMapWithExpectedSize(indexList.size());
    +                for (ClusterIndexHealth indexHealth : indexList) {
    +                    indices.put(indexHealth.getIndex(), indexHealth);
    +                }
    +            }
    +            ClusterStateHealth stateHealth = new ClusterStateHealth(
    +                activePrimaryShards,
    +                activeShards,
    +                relocatingShards,
    +                initializingShards,
    +                unassignedShards,
    +                numberOfNodes,
    +                numberOfDataNodes,
    +                activeShardsPercent,
    +                status,
    +                indices
    +            );
    +
    +            // ClusterHealthResponse fields
    +            String clusterName = (String) parsedObjects[i++];
    +            int numberOfPendingTasks = (int) parsedObjects[i++];
    +            int numberOfInFlightFetch = (int) parsedObjects[i++];
    +            int delayedUnassignedShards = (int) parsedObjects[i++];
    +            long taskMaxWaitingTimeMillis = (long) parsedObjects[i++];
    +            boolean timedOut = (boolean) parsedObjects[i];
    +            return new ClusterHealthResponse(
    +                clusterName,
    +                numberOfPendingTasks,
    +                numberOfInFlightFetch,
    +                delayedUnassignedShards,
    +                TimeValue.timeValueMillis(taskMaxWaitingTimeMillis),
    +                timedOut,
    +                stateHealth
    +            );
    +        }
    +    );
    +
    +    private static final ObjectParser.NamedObjectParser INDEX_PARSER = (
    +        XContentParser parser,
    +        Void context,
    +        String index) -> ClusterIndexHealth.innerFromXContent(parser, index);
    +
    +    static {
    +        // ClusterStateHealth fields
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_NODES));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_DATA_NODES));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.ACTIVE_SHARDS));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.RELOCATING_SHARDS));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.ACTIVE_PRIMARY_SHARDS));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.INITIALIZING_SHARDS));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.UNASSIGNED_SHARDS));
    +        PARSER.declareDouble(constructorArg(), new ParseField(ClusterHealthResponse.ACTIVE_SHARDS_PERCENT_AS_NUMBER));
    +        PARSER.declareString(constructorArg(), new ParseField(ClusterHealthResponse.STATUS));
    +        // Can be absent if LEVEL == 'cluster'
    +        PARSER.declareNamedObjects(optionalConstructorArg(), INDEX_PARSER, new ParseField(ClusterHealthResponse.INDICES));
    +
    +        // ClusterHealthResponse fields
    +        PARSER.declareString(constructorArg(), new ParseField(ClusterHealthResponse.CLUSTER_NAME));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_PENDING_TASKS));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_IN_FLIGHT_FETCH));
    +        PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.DELAYED_UNASSIGNED_SHARDS));
    +        PARSER.declareLong(constructorArg(), new ParseField(ClusterHealthResponse.TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS));
    +        PARSER.declareBoolean(constructorArg(), new ParseField(ClusterHealthResponse.TIMED_OUT));
    +    }
    +
         private final ClusterStatsLevel level = randomFrom(ClusterStatsLevel.values());
     
         public void testIsTimeout() {
    @@ -102,7 +197,7 @@ ClusterHealthResponse maybeSerialize(ClusterHealthResponse clusterHealth) throws
     
         @Override
         protected ClusterHealthResponse doParseInstance(XContentParser parser) {
    -        return ClusterHealthResponse.fromXContent(parser);
    +        return PARSER.apply(parser, null);
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
    index 345f85470a056..adefd71f93590 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
    @@ -14,7 +14,6 @@
     import org.elasticsearch.action.ActionListener;
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
    -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
     import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
     import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
     import org.elasticsearch.action.support.ActionTestUtils;
    @@ -289,7 +288,7 @@ public void onFailure(Exception e) {
             request.setReason("Testing Cancellation");
             request.setTargetTaskId(new TaskId(testNodes[0].getNodeId(), mainTask.getId()));
             // And send the cancellation request to a random node
    -        CancelTasksResponse response = ActionTestUtils.executeBlocking(
    +        ListTasksResponse response = ActionTestUtils.executeBlocking(
                 testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction,
                 request
             );
    @@ -368,7 +367,7 @@ public void onFailure(Exception e) {
             request.setReason("Testing Cancellation");
             request.setTargetParentTaskId(new TaskId(testNodes[0].getNodeId(), mainTask.getId()));
             // And send the cancellation request to a random node
    -        CancelTasksResponse response = ActionTestUtils.executeBlocking(
    +        ListTasksResponse response = ActionTestUtils.executeBlocking(
                 testNodes[randomIntBetween(1, testNodes.length - 1)].transportCancelTasksAction,
                 request
             );
    @@ -487,7 +486,7 @@ public void onFailure(Exception e) {
                     request.setReason("Testing Cancellation");
                     request.setTargetTaskId(new TaskId(testNodes[0].getNodeId(), mainTask.getId()));
                     // And send the cancellation request to a random node
    -                CancelTasksResponse response = ActionTestUtils.executeBlocking(testNodes[0].transportCancelTasksAction, request);
    +                ListTasksResponse response = ActionTestUtils.executeBlocking(testNodes[0].transportCancelTasksAction, request);
                     logger.info("--> Done simulating issuing cancel request on the node that is about to leave the cluster");
                     // This node still thinks that's part of the cluster, so cancelling should look successful
                     assertThat(response.getTasks().size(), lessThanOrEqualTo(1));
    @@ -544,7 +543,7 @@ public void testNonExistingTaskCancellation() throws Exception {
                 randomSubsetOf(randomIntBetween(1, testNodes.length - 1), testNodes).stream().map(TestNode::getNodeId).toArray(String[]::new)
             );
             // And send the cancellation request to a random node
    -        CancelTasksResponse response = ActionTestUtils.executeBlocking(
    +        ListTasksResponse response = ActionTestUtils.executeBlocking(
                 testNodes[randomIntBetween(1, testNodes.length - 1)].transportCancelTasksAction,
                 request
             );
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
    index 581473eb074be..d714105d9a13a 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
    @@ -71,11 +71,8 @@ public class TestTaskPlugin extends Plugin implements ActionPlugin, NetworkPlugi
     
         private static final Logger logger = LogManager.getLogger(TestTaskPlugin.class);
     
    -    public static final ActionType TEST_TASK_ACTION = ActionType.localOnly("cluster:admin/tasks/test");
    -    public static final ActionType UNBLOCK_TASK_ACTION = new ActionType<>(
    -        "cluster:admin/tasks/testunblock",
    -        UnblockTestTasksResponse::new
    -    );
    +    public static final ActionType TEST_TASK_ACTION = new ActionType<>("cluster:admin/tasks/test");
    +    public static final ActionType UNBLOCK_TASK_ACTION = new ActionType<>("cluster:admin/tasks/testunblock");
     
         @Override
         public List> getActions() {
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
    index 86ccd9807cf9f..7168b2c1edcdd 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
    @@ -13,7 +13,6 @@
     import org.elasticsearch.action.FailedNodeException;
     import org.elasticsearch.action.TaskOperationFailure;
     import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
    -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
     import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
     import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
     import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
    @@ -521,7 +520,7 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception {
             request.setNodes(testNodes[0].getNodeId());
             request.setReason("Testing Cancellation");
             request.setActions(actionName);
    -        CancelTasksResponse response = ActionTestUtils.executeBlocking(
    +        ListTasksResponse response = ActionTestUtils.executeBlocking(
                 testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction,
                 request
             );
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java
    index ff70e7e6756ed..f8d3871fbfa8f 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java
    @@ -38,7 +38,7 @@ protected GetScriptLanguageResponse createTestInstance() {
     
         @Override
         protected GetScriptLanguageResponse doParseInstance(XContentParser parser) throws IOException {
    -        return GetScriptLanguageResponse.fromXContent(parser);
    +        return new GetScriptLanguageResponse(ScriptLanguagesInfo.fromXContent(parser));
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java
    index 500080fa9f118..05820c071052c 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java
    @@ -12,6 +12,8 @@
     import org.elasticsearch.script.Script;
     import org.elasticsearch.script.StoredScriptSource;
     import org.elasticsearch.test.AbstractXContentSerializingTestCase;
    +import org.elasticsearch.xcontent.ConstructingObjectParser;
    +import org.elasticsearch.xcontent.ObjectParser;
     import org.elasticsearch.xcontent.XContentParser;
     import org.elasticsearch.xcontent.XContentType;
     
    @@ -20,11 +22,41 @@
     import java.util.Map;
     import java.util.function.Predicate;
     
    +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
    +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
    +
     public class GetStoredScriptResponseTests extends AbstractXContentSerializingTestCase {
     
    +    private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
    +        "GetStoredScriptResponse",
    +        true,
    +        (a, c) -> {
    +            String id = (String) a[0];
    +            boolean found = (Boolean) a[1];
    +            StoredScriptSource scriptSource = (StoredScriptSource) a[2];
    +            return found ? new GetStoredScriptResponse(id, scriptSource) : new GetStoredScriptResponse(id, null);
    +        }
    +    );
    +
    +    static {
    +        PARSER.declareField(constructorArg(), (p, c) -> p.text(), GetStoredScriptResponse._ID_PARSE_FIELD, ObjectParser.ValueType.STRING);
    +        PARSER.declareField(
    +            constructorArg(),
    +            (p, c) -> p.booleanValue(),
    +            GetStoredScriptResponse.FOUND_PARSE_FIELD,
    +            ObjectParser.ValueType.BOOLEAN
    +        );
    +        PARSER.declareField(
    +            optionalConstructorArg(),
    +            (p, c) -> StoredScriptSource.fromXContent(p, true),
    +            GetStoredScriptResponse.SCRIPT,
    +            ObjectParser.ValueType.OBJECT
    +        );
    +    }
    +
         @Override
         protected GetStoredScriptResponse doParseInstance(XContentParser parser) throws IOException {
    -        return GetStoredScriptResponse.fromXContent(parser);
    +        return PARSER.apply(parser, null);
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java
    deleted file mode 100644
    index 4f3b14cd986c1..0000000000000
    --- a/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java
    +++ /dev/null
    @@ -1,33 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.action.admin.indices.cache.clear;
    -
    -import org.elasticsearch.action.support.DefaultShardOperationFailedException;
    -import org.elasticsearch.test.AbstractBroadcastResponseTestCase;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.util.List;
    -
    -public class ClearIndicesCacheResponseTests extends AbstractBroadcastResponseTestCase {
    -
    -    @Override
    -    protected ClearIndicesCacheResponse createTestInstance(
    -        int totalShards,
    -        int successfulShards,
    -        int failedShards,
    -        List failures
    -    ) {
    -        return new ClearIndicesCacheResponse(totalShards, successfulShards, failedShards, failures);
    -    }
    -
    -    @Override
    -    protected ClearIndicesCacheResponse doParseInstance(XContentParser parser) {
    -        return ClearIndicesCacheResponse.fromXContent(parser);
    -    }
    -}
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java
    deleted file mode 100644
    index 62611060ce25d..0000000000000
    --- a/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java
    +++ /dev/null
    @@ -1,33 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.action.admin.indices.flush;
    -
    -import org.elasticsearch.action.support.DefaultShardOperationFailedException;
    -import org.elasticsearch.test.AbstractBroadcastResponseTestCase;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.util.List;
    -
    -public class FlushResponseTests extends AbstractBroadcastResponseTestCase {
    -
    -    @Override
    -    protected FlushResponse createTestInstance(
    -        int totalShards,
    -        int successfulShards,
    -        int failedShards,
    -        List failures
    -    ) {
    -        return new FlushResponse(totalShards, successfulShards, failedShards, failures);
    -    }
    -
    -    @Override
    -    protected FlushResponse doParseInstance(XContentParser parser) {
    -        return FlushResponse.fromXContent(parser);
    -    }
    -}
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java
    deleted file mode 100644
    index ed1160edeb8f5..0000000000000
    --- a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java
    +++ /dev/null
    @@ -1,32 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.action.admin.indices.forcemerge;
    -
    -import org.elasticsearch.action.support.DefaultShardOperationFailedException;
    -import org.elasticsearch.test.AbstractBroadcastResponseTestCase;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.util.List;
    -
    -public class ForceMergeResponseTests extends AbstractBroadcastResponseTestCase {
    -    @Override
    -    protected ForceMergeResponse createTestInstance(
    -        int totalShards,
    -        int successfulShards,
    -        int failedShards,
    -        List failures
    -    ) {
    -        return new ForceMergeResponse(totalShards, successfulShards, failedShards, failures);
    -    }
    -
    -    @Override
    -    protected ForceMergeResponse doParseInstance(XContentParser parser) {
    -        return ForceMergeResponse.fromXContent(parser);
    -    }
    -}
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java
    deleted file mode 100644
    index 5a3183b3e61b9..0000000000000
    --- a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java
    +++ /dev/null
    @@ -1,33 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.action.admin.indices.refresh;
    -
    -import org.elasticsearch.action.support.DefaultShardOperationFailedException;
    -import org.elasticsearch.test.AbstractBroadcastResponseTestCase;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -import java.util.List;
    -
    -public class RefreshResponseTests extends AbstractBroadcastResponseTestCase {
    -
    -    @Override
    -    protected RefreshResponse createTestInstance(
    -        int totalShards,
    -        int successfulShards,
    -        int failedShards,
    -        List failures
    -    ) {
    -        return new RefreshResponse(totalShards, successfulShards, failedShards, failures);
    -    }
    -
    -    @Override
    -    protected RefreshResponse doParseInstance(XContentParser parser) {
    -        return RefreshResponse.fromXContent(parser);
    -    }
    -}
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java
    index 6c9297bb41ae0..86968bda62d91 100644
    --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java
    @@ -11,6 +11,7 @@
     import org.elasticsearch.common.io.stream.Writeable;
     import org.elasticsearch.common.settings.IndexScopedSettings;
     import org.elasticsearch.common.settings.Settings;
    +import org.elasticsearch.common.xcontent.XContentParserUtils;
     import org.elasticsearch.index.RandomCreateIndexGenerator;
     import org.elasticsearch.test.AbstractChunkedSerializingTestCase;
     import org.elasticsearch.xcontent.XContentParser;
    @@ -18,6 +19,7 @@
     import java.io.IOException;
     import java.util.HashMap;
     import java.util.HashSet;
    +import java.util.Map;
     import java.util.Set;
     import java.util.function.Predicate;
     
    @@ -70,7 +72,58 @@ protected Writeable.Reader instanceReader() {
     
         @Override
         protected GetSettingsResponse doParseInstance(XContentParser parser) throws IOException {
    -        return GetSettingsResponse.fromXContent(parser);
    +        HashMap indexToSettings = new HashMap<>();
    +        HashMap indexToDefaultSettings = new HashMap<>();
    +
    +        if (parser.currentToken() == null) {
    +            parser.nextToken();
    +        }
    +        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
    +        parser.nextToken();
    +
    +        while (parser.isClosed() == false) {
    +            if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
    +                // we must assume this is an index entry
    +                parseIndexEntry(parser, indexToSettings, indexToDefaultSettings);
    +            } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
    +                parser.skipChildren();
    +            } else {
    +                parser.nextToken();
    +            }
    +        }
    +
    +        return new GetSettingsResponse(Map.copyOf(indexToSettings), Map.copyOf(indexToDefaultSettings));
    +    }
    +
    +    private static void parseIndexEntry(
    +        XContentParser parser,
    +        Map indexToSettings,
    +        Map indexToDefaultSettings
    +    ) throws IOException {
    +        String indexName = parser.currentName();
    +        parser.nextToken();
    +        while (parser.isClosed() == false && parser.currentToken() != XContentParser.Token.END_OBJECT) {
    +            parseSettingsField(parser, indexName, indexToSettings, indexToDefaultSettings);
    +        }
    +    }
    +
    +    private static void parseSettingsField(
    +        XContentParser parser,
    +        String currentIndexName,
    +        Map indexToSettings,
    +        Map indexToDefaultSettings
    +    ) throws IOException {
    +
    +        if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
    +            switch (parser.currentName()) {
    +                case "settings" -> indexToSettings.put(currentIndexName, Settings.fromXContent(parser));
    +                case "defaults" -> indexToDefaultSettings.put(currentIndexName, Settings.fromXContent(parser));
    +                default -> parser.skipChildren();
    +            }
    +        } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
    +            parser.skipChildren();
    +        }
    +        parser.nextToken();
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponseTests.java
    deleted file mode 100644
    index b9f3e8b89a214..0000000000000
    --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponseTests.java
    +++ /dev/null
    @@ -1,63 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.action.admin.indices.shrink;
    -
    -import org.elasticsearch.common.Strings;
    -import org.elasticsearch.common.io.stream.Writeable;
    -import org.elasticsearch.test.AbstractXContentSerializingTestCase;
    -import org.elasticsearch.xcontent.XContentParser;
    -
    -public class ResizeResponseTests extends AbstractXContentSerializingTestCase {
    -
    -    public void testToXContent() {
    -        ResizeResponse response = new ResizeResponse(true, false, "index_name");
    -        String output = Strings.toString(response);
    -        assertEquals("""
    -            {"acknowledged":true,"shards_acknowledged":false,"index":"index_name"}""", output);
    -    }
    -
    -    @Override
    -    protected ResizeResponse doParseInstance(XContentParser parser) {
    -        return ResizeResponse.fromXContent(parser);
    -    }
    -
    -    @Override
    -    protected ResizeResponse createTestInstance() {
    -        boolean acknowledged = randomBoolean();
    -        boolean shardsAcknowledged = acknowledged && randomBoolean();
    -        String index = randomAlphaOfLength(5);
    -        return new ResizeResponse(acknowledged, shardsAcknowledged, index);
    -    }
    -
    -    @Override
    -    protected Writeable.Reader instanceReader() {
    -        return ResizeResponse::new;
    -    }
    -
    -    @Override
    -    protected ResizeResponse mutateInstance(ResizeResponse response) {
    -        if (randomBoolean()) {
    -            if (randomBoolean()) {
    -                boolean acknowledged = response.isAcknowledged() == false;
    -                boolean shardsAcknowledged = acknowledged && response.isShardsAcknowledged();
    -                return new ResizeResponse(acknowledged, shardsAcknowledged, response.index());
    -            } else {
    -                boolean shardsAcknowledged = response.isShardsAcknowledged() == false;
    -                boolean acknowledged = shardsAcknowledged || response.isAcknowledged();
    -                return new ResizeResponse(acknowledged, shardsAcknowledged, response.index());
    -            }
    -        } else {
    -            return new ResizeResponse(
    -                response.isAcknowledged(),
    -                response.isShardsAcknowledged(),
    -                response.index() + randomAlphaOfLengthBetween(2, 5)
    -            );
    -        }
    -    }
    -}
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java
    index ccf9681d3680b..76b1fa0011540 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java
    @@ -13,15 +13,18 @@
     import org.elasticsearch.action.DocWriteRequest;
     import org.elasticsearch.action.DocWriteResponse;
     import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
    +import org.elasticsearch.action.delete.DeleteResponse;
     import org.elasticsearch.action.delete.DeleteResponseTests;
     import org.elasticsearch.action.index.IndexResponse;
     import org.elasticsearch.action.index.IndexResponseTests;
     import org.elasticsearch.action.update.UpdateResponse;
     import org.elasticsearch.action.update.UpdateResponseTests;
     import org.elasticsearch.common.bytes.BytesReference;
    +import org.elasticsearch.core.CheckedConsumer;
     import org.elasticsearch.core.RestApiVersion;
     import org.elasticsearch.core.Tuple;
     import org.elasticsearch.index.shard.ShardId;
    +import org.elasticsearch.rest.RestStatus;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentBuilder;
    @@ -34,6 +37,8 @@
     
     import static org.elasticsearch.ElasticsearchExceptionTests.assertDeepEquals;
     import static org.elasticsearch.ElasticsearchExceptionTests.randomExceptions;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
     import static org.hamcrest.Matchers.containsString;
     
     public class BulkItemResponseTests extends ESTestCase {
    @@ -93,7 +98,7 @@ public void testToAndFromXContent() throws IOException {
                 BulkItemResponse parsedBulkItemResponse;
                 try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
                     assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    -                parsedBulkItemResponse = BulkItemResponse.fromXContent(parser, bulkItemId);
    +                parsedBulkItemResponse = itemResponseFromXContent(parser, bulkItemId);
                     assertNull(parser.nextToken());
                 }
                 assertBulkItemResponse(expectedBulkItemResponse, parsedBulkItemResponse);
    @@ -127,7 +132,7 @@ public void testFailureToAndFromXContent() throws IOException {
             BulkItemResponse parsedBulkItemResponse;
             try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
                 assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    -            parsedBulkItemResponse = BulkItemResponse.fromXContent(parser, itemId);
    +            parsedBulkItemResponse = itemResponseFromXContent(parser, itemId);
                 assertNull(parser.nextToken());
             }
             assertBulkItemResponse(expectedBulkItemResponse, parsedBulkItemResponse);
    @@ -161,4 +166,78 @@ public static void assertBulkItemResponse(BulkItemResponse expected, BulkItemRes
                 }
             }
         }
    +
    +    /**
    +     * Reads a {@link BulkItemResponse} from a {@link XContentParser}.
    +     *
    +     * @param parser the {@link XContentParser}
    +     * @param id the id to assign to the parsed {@link BulkItemResponse}. It is usually the index of
    +     *           the item in the {@link BulkResponse#getItems} array.
    +     */
    +    public static BulkItemResponse itemResponseFromXContent(XContentParser parser, int id) throws IOException {
    +        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
    +
    +        XContentParser.Token token = parser.nextToken();
    +        ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    +
    +        String currentFieldName = parser.currentName();
    +        token = parser.nextToken();
    +
    +        final DocWriteRequest.OpType opType = DocWriteRequest.OpType.fromString(currentFieldName);
    +        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
    +
    +        DocWriteResponse.Builder builder = null;
    +        CheckedConsumer itemParser = null;
    +
    +        if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
    +            final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder();
    +            builder = indexResponseBuilder;
    +            itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder);
    +
    +        } else if (opType == DocWriteRequest.OpType.UPDATE) {
    +            final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder();
    +            builder = updateResponseBuilder;
    +            itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder);
    +
    +        } else if (opType == DocWriteRequest.OpType.DELETE) {
    +            final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder();
    +            builder = deleteResponseBuilder;
    +            itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder);
    +        } else {
    +            throwUnknownField(currentFieldName, parser);
    +        }
    +
    +        RestStatus status = null;
    +        ElasticsearchException exception = null;
    +        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    +            if (token == XContentParser.Token.FIELD_NAME) {
    +                currentFieldName = parser.currentName();
    +            }
    +
    +            if (BulkItemResponse.ERROR.equals(currentFieldName)) {
    +                if (token == XContentParser.Token.START_OBJECT) {
    +                    exception = ElasticsearchException.fromXContent(parser);
    +                }
    +            } else if (BulkItemResponse.STATUS.equals(currentFieldName)) {
    +                if (token == XContentParser.Token.VALUE_NUMBER) {
    +                    status = RestStatus.fromCode(parser.intValue());
    +                }
    +            } else {
    +                itemParser.accept(parser);
    +            }
    +        }
    +
    +        ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser);
    +        token = parser.nextToken();
    +        ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser);
    +
    +        BulkItemResponse bulkItemResponse;
    +        if (exception != null) {
    +            Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getId(), exception, status);
    +            bulkItemResponse = BulkItemResponse.failure(id, opType, failure);
    +        } else {
    +            bulkItemResponse = BulkItemResponse.success(id, opType, builder.build());
    +        }
    +        return bulkItemResponse;
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java
    index 0002b8554a451..5cd1fde9edd9b 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java
    @@ -16,17 +16,17 @@
     import org.elasticsearch.index.shard.ShardId;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.xcontent.XContentType;
    -import org.hamcrest.Matchers;
     
     import java.util.ArrayList;
     import java.util.Arrays;
    +import java.util.Collections;
     import java.util.HashSet;
     import java.util.List;
     import java.util.Set;
     
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.is;
    -import static org.hamcrest.Matchers.nullValue;
    +import static org.hamcrest.Matchers.sameInstance;
     
     public class BulkRequestModifierTests extends ESTestCase {
     
    @@ -36,37 +36,51 @@ public void testBulkRequestModifier() {
             for (int i = 0; i < numRequests; i++) {
                 bulkRequest.add(new IndexRequest("_index").id(String.valueOf(i)).source("{}", XContentType.JSON));
             }
    -        CaptureActionListener actionListener = new CaptureActionListener();
    -        TransportBulkAction.BulkRequestModifier bulkRequestModifier = new TransportBulkAction.BulkRequestModifier(bulkRequest);
     
    -        int i = 0;
    +        // wrap the bulk request and fail some of the item requests at random
    +        TransportBulkAction.BulkRequestModifier modifier = new TransportBulkAction.BulkRequestModifier(bulkRequest);
             Set failedSlots = new HashSet<>();
    -        while (bulkRequestModifier.hasNext()) {
    -            bulkRequestModifier.next();
    +        for (int i = 0; modifier.hasNext(); i++) {
    +            modifier.next();
                 if (randomBoolean()) {
    -                bulkRequestModifier.markItemAsFailed(i, new RuntimeException());
    +                modifier.markItemAsFailed(i, new RuntimeException());
                     failedSlots.add(i);
                 }
    -            i++;
    +        }
    +        assertThat(modifier.getBulkRequest().requests().size(), equalTo(numRequests - failedSlots.size()));
    +
    +        // populate the non-failed responses
    +        BulkRequest subsequentBulkRequest = modifier.getBulkRequest();
    +        assertThat(subsequentBulkRequest.requests().size(), equalTo(numRequests - failedSlots.size()));
    +        List responses = new ArrayList<>();
    +        for (int j = 0; j < subsequentBulkRequest.requests().size(); j++) {
    +            IndexRequest indexRequest = (IndexRequest) subsequentBulkRequest.requests().get(j);
    +            IndexResponse indexResponse = new IndexResponse(new ShardId("_index", "_na_", 0), indexRequest.id(), 1, 17, 1, true);
    +            responses.add(BulkItemResponse.success(j, indexRequest.opType(), indexResponse));
             }
     
    -        assertThat(bulkRequestModifier.getBulkRequest().requests().size(), equalTo(numRequests - failedSlots.size()));
    -        // simulate that we actually executed the modified bulk request:
    +        // simulate that we actually executed the modified bulk request
             long ingestTook = randomLong();
    -        ActionListener result = bulkRequestModifier.wrapActionListenerIfNeeded(ingestTook, actionListener);
    -        result.onResponse(new BulkResponse(new BulkItemResponse[numRequests - failedSlots.size()], 0));
    +        CaptureActionListener actionListener = new CaptureActionListener();
    +        ActionListener result = modifier.wrapActionListenerIfNeeded(ingestTook, actionListener);
    +        result.onResponse(new BulkResponse(responses.toArray(new BulkItemResponse[0]), 0));
     
    +        // check the results for successes and failures
             BulkResponse bulkResponse = actionListener.getResponse();
             assertThat(bulkResponse.getIngestTookInMillis(), equalTo(ingestTook));
    -        for (int j = 0; j < bulkResponse.getItems().length; j++) {
    -            if (failedSlots.contains(j)) {
    -                BulkItemResponse item = bulkResponse.getItems()[j];
    +        for (int i = 0; i < bulkResponse.getItems().length; i++) {
    +            BulkItemResponse item = bulkResponse.getItems()[i];
    +            if (failedSlots.contains(i)) {
                     assertThat(item.isFailed(), is(true));
    -                assertThat(item.getFailure().getIndex(), equalTo("_index"));
    -                assertThat(item.getFailure().getId(), equalTo(String.valueOf(j)));
    -                assertThat(item.getFailure().getMessage(), equalTo("java.lang.RuntimeException"));
    +                BulkItemResponse.Failure failure = item.getFailure();
    +                assertThat(failure.getIndex(), equalTo("_index"));
    +                assertThat(failure.getId(), equalTo(String.valueOf(i)));
    +                assertThat(failure.getMessage(), equalTo("java.lang.RuntimeException"));
                 } else {
    -                assertThat(bulkResponse.getItems()[j], nullValue());
    +                assertThat(item.isFailed(), is(false));
    +                IndexResponse success = item.getResponse();
    +                assertThat(success.getIndex(), equalTo("_index"));
    +                assertThat(success.getId(), equalTo(String.valueOf(i)));
                 }
             }
         }
    @@ -78,16 +92,29 @@ public void testPipelineFailures() {
             }
     
             TransportBulkAction.BulkRequestModifier modifier = new TransportBulkAction.BulkRequestModifier(originalBulkRequest);
    +
    +        final List failures = new ArrayList<>();
    +        // iterate the requests in order, recording that half of them should be failures
             for (int i = 0; modifier.hasNext(); i++) {
                 modifier.next();
                 if (i % 2 == 0) {
    -                modifier.markItemAsFailed(i, new RuntimeException());
    +                failures.add(i);
                 }
             }
     
    +        // with async processors, the failures can come back 'out of order' so sometimes we'll shuffle the list
    +        if (randomBoolean()) {
    +            Collections.shuffle(failures, random());
    +        }
    +
    +        // actually mark the failures
    +        for (int i : failures) {
    +            modifier.markItemAsFailed(i, new RuntimeException());
    +        }
    +
             // So half of the requests have "failed", so only the successful requests are left:
             BulkRequest bulkRequest = modifier.getBulkRequest();
    -        assertThat(bulkRequest.requests().size(), Matchers.equalTo(16));
    +        assertThat(bulkRequest.requests().size(), equalTo(16));
     
             List responses = new ArrayList<>();
             ActionListener bulkResponseListener = modifier.wrapActionListenerIfNeeded(1L, new ActionListener<>() {
    @@ -106,11 +133,11 @@ public void onFailure(Exception e) {}
                 IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.id(), 1, 17, 1, true);
                 originalResponses.add(BulkItemResponse.success(Integer.parseInt(indexRequest.id()), indexRequest.opType(), indexResponse));
             }
    -        bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new BulkItemResponse[originalResponses.size()]), 0));
    +        bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new BulkItemResponse[0]), 0));
     
    -        assertThat(responses.size(), Matchers.equalTo(32));
    +        assertThat(responses.size(), equalTo(32));
             for (int i = 0; i < 32; i++) {
    -            assertThat(responses.get(i).getId(), Matchers.equalTo(String.valueOf(i)));
    +            assertThat(responses.get(i).getId(), equalTo(String.valueOf(i)));
             }
         }
     
    @@ -126,7 +153,7 @@ public void testNoFailures() {
             }
     
             BulkRequest bulkRequest = modifier.getBulkRequest();
    -        assertThat(bulkRequest, Matchers.sameInstance(originalBulkRequest));
    +        assertThat(bulkRequest, sameInstance(originalBulkRequest));
             assertThat(modifier.wrapActionListenerIfNeeded(1L, ActionListener.noop()), ActionListenerTests.isMappedActionListener());
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java
    index a60b42de12d6e..f228326955f38 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java
    @@ -29,7 +29,7 @@ public void testIndexRequest() throws IOException {
                 """);
             BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current());
             final AtomicBoolean parsed = new AtomicBoolean();
    -        parser.parse(request, "foo", null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
                 assertFalse(parsed.get());
                 assertEquals("foo", indexRequest.index());
                 assertEquals("bar", indexRequest.id());
    @@ -38,7 +38,7 @@ public void testIndexRequest() throws IOException {
             }, req -> fail(), req -> fail());
             assertTrue(parsed.get());
     
    -        parser.parse(request, "foo", null, null, null, true, null, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, true, null, null, false, XContentType.JSON, (indexRequest, type) -> {
                 assertTrue(indexRequest.isRequireAlias());
             }, req -> fail(), req -> fail());
     
    @@ -46,7 +46,7 @@ public void testIndexRequest() throws IOException {
                 { "index":{ "_id": "bar", "require_alias": true } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
                 assertTrue(indexRequest.isRequireAlias());
             }, req -> fail(), req -> fail());
     
    @@ -54,7 +54,7 @@ public void testIndexRequest() throws IOException {
                 { "index":{ "_id": "bar", "require_alias": false } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, true, null, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, true, null, null, false, XContentType.JSON, (indexRequest, type) -> {
                 assertFalse(indexRequest.isRequireAlias());
             }, req -> fail(), req -> fail());
         }
    @@ -73,6 +73,7 @@ public void testDeleteRequest() throws IOException {
                 null,
                 null,
                 null,
    +            null,
                 false,
                 XContentType.JSON,
                 (req, type) -> fail(),
    @@ -94,7 +95,7 @@ public void testUpdateRequest() throws IOException {
                 """);
             BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current());
             final AtomicBoolean parsed = new AtomicBoolean();
    -        parser.parse(request, "foo", null, null, null, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
    +        parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
                 assertFalse(parsed.get());
                 assertEquals("foo", updateRequest.index());
                 assertEquals("bar", updateRequest.id());
    @@ -103,7 +104,7 @@ public void testUpdateRequest() throws IOException {
             }, req -> fail());
             assertTrue(parsed.get());
     
    -        parser.parse(request, "foo", null, null, null, true, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
    +        parser.parse(request, "foo", null, null, null, true, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
                 assertTrue(updateRequest.isRequireAlias());
             }, req -> fail());
     
    @@ -111,7 +112,7 @@ public void testUpdateRequest() throws IOException {
                 { "update":{ "_id": "bar", "require_alias": true } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
    +        parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
                 assertTrue(updateRequest.isRequireAlias());
             }, req -> fail());
     
    @@ -119,7 +120,7 @@ public void testUpdateRequest() throws IOException {
                 { "update":{ "_id": "bar", "require_alias": false } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, true, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
    +        parser.parse(request, "foo", null, null, null, true, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> {
                 assertFalse(updateRequest.isRequireAlias());
             }, req -> fail());
         }
    @@ -139,6 +140,7 @@ public void testBarfOnLackOfTrailingNewline() {
                     null,
                     null,
                     null,
    +                null,
                     false,
                     XContentType.JSON,
                     (req, type) -> fail(),
    @@ -166,6 +168,7 @@ public void testFailOnExplicitIndex() {
                     null,
                     null,
                     null,
    +                null,
                     false,
                     XContentType.JSON,
                     (req, type) -> fail(),
    @@ -183,7 +186,7 @@ public void testTypesStillParsedForBulkMonitoring() throws IOException {
                 """);
             BulkRequestParser parser = new BulkRequestParser(false, RestApiVersion.current());
             final AtomicBoolean parsed = new AtomicBoolean();
    -        parser.parse(request, "foo", null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
                 assertFalse(parsed.get());
                 assertEquals("foo", indexRequest.index());
                 assertEquals("bar", indexRequest.id());
    @@ -210,6 +213,7 @@ public void testParseDeduplicatesParameterStrings() throws IOException {
                 null,
                 null,
                 null,
    +            null,
                 true,
                 XContentType.JSON,
                 (indexRequest, type) -> indexRequests.add(indexRequest),
    @@ -241,6 +245,7 @@ public void testFailOnInvalidAction() {
                     null,
                     null,
                     null,
    +                null,
                     false,
                     XContentType.JSON,
                     (req, type) -> fail(),
    @@ -260,11 +265,11 @@ public void testListExecutedPipelines() throws IOException {
                 {}
                 """);
             BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current());
    -        parser.parse(request, "foo", null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> {
                 assertFalse(indexRequest.getListExecutedPipelines());
             }, req -> fail(), req -> fail());
     
    -        parser.parse(request, "foo", null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
                 assertTrue(indexRequest.getListExecutedPipelines());
             }, req -> fail(), req -> fail());
     
    @@ -272,7 +277,7 @@ public void testListExecutedPipelines() throws IOException {
                 { "index":{ "_id": "bar", "op_type": "create" } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
                 assertTrue(indexRequest.getListExecutedPipelines());
             }, req -> fail(), req -> fail());
     
    @@ -280,7 +285,7 @@ public void testListExecutedPipelines() throws IOException {
                 { "create":{ "_id": "bar" } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
                 assertTrue(indexRequest.getListExecutedPipelines());
             }, req -> fail(), req -> fail());
     
    @@ -288,7 +293,7 @@ public void testListExecutedPipelines() throws IOException {
                 { "index":{ "_id": "bar", "list_executed_pipelines": "true" } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, null, false, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, false, false, XContentType.JSON, (indexRequest, type) -> {
                 assertTrue(indexRequest.getListExecutedPipelines());
             }, req -> fail(), req -> fail());
     
    @@ -296,7 +301,7 @@ public void testListExecutedPipelines() throws IOException {
                 { "index":{ "_id": "bar", "list_executed_pipelines": "false" } }
                 {}
                 """);
    -        parser.parse(request, "foo", null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
    +        parser.parse(request, "foo", null, null, null, null, null, true, false, XContentType.JSON, (indexRequest, type) -> {
                 assertFalse(indexRequest.getListExecutedPipelines());
             }, req -> fail(), req -> fail());
         }
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java
    index 5a1c7f1572e23..366196b6a0eac 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java
    @@ -16,6 +16,7 @@
     import org.elasticsearch.action.index.IndexResponseTests;
     import org.elasticsearch.action.update.UpdateResponseTests;
     import org.elasticsearch.common.bytes.BytesReference;
    +import org.elasticsearch.common.xcontent.ChunkedToXContent;
     import org.elasticsearch.core.Tuple;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.xcontent.ToXContent;
    @@ -23,11 +24,16 @@
     import org.elasticsearch.xcontent.XContentType;
     
     import java.io.IOException;
    +import java.util.ArrayList;
    +import java.util.List;
     
     import static org.elasticsearch.ElasticsearchExceptionTests.randomExceptions;
     import static org.elasticsearch.action.bulk.BulkItemResponseTests.assertBulkItemResponse;
     import static org.elasticsearch.action.bulk.BulkResponse.NO_INGEST_TOOK;
     import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
     import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
     import static org.hamcrest.Matchers.equalTo;
     
    @@ -68,11 +74,16 @@ public void testToAndFromXContent() throws IOException {
             }
     
             BulkResponse bulkResponse = new BulkResponse(bulkItems, took, ingestTook);
    -        BytesReference originalBytes = toShuffledXContent(bulkResponse, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
    +        BytesReference originalBytes = toShuffledXContent(
    +            ChunkedToXContent.wrapAsToXContent(bulkResponse),
    +            xContentType,
    +            ToXContent.EMPTY_PARAMS,
    +            humanReadable
    +        );
     
             BulkResponse parsedBulkResponse;
             try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
    -            parsedBulkResponse = BulkResponse.fromXContent(parser);
    +            parsedBulkResponse = fromXContent(parser);
                 assertNull(parser.nextToken());
             }
     
    @@ -148,4 +159,39 @@ public void testToXContentPlacesErrorsFirst() throws IOException {
             }
             return randomDocWriteResponses;
         }
    +
    +    private static BulkResponse fromXContent(XContentParser parser) throws IOException {
    +        XContentParser.Token token = parser.nextToken();
    +        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
    +
    +        long took = -1L;
    +        long ingestTook = NO_INGEST_TOOK;
    +        List items = new ArrayList<>();
    +
    +        String currentFieldName = parser.currentName();
    +        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    +            if (token == XContentParser.Token.FIELD_NAME) {
    +                currentFieldName = parser.currentName();
    +            } else if (token.isValue()) {
    +                if (BulkResponse.TOOK.equals(currentFieldName)) {
    +                    took = parser.longValue();
    +                } else if (BulkResponse.INGEST_TOOK.equals(currentFieldName)) {
    +                    ingestTook = parser.longValue();
    +                } else if (BulkResponse.ERRORS.equals(currentFieldName) == false) {
    +                    throwUnknownField(currentFieldName, parser);
    +                }
    +            } else if (token == XContentParser.Token.START_ARRAY) {
    +                if (BulkResponse.ITEMS.equals(currentFieldName)) {
    +                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
    +                        items.add(BulkItemResponseTests.itemResponseFromXContent(parser, items.size()));
    +                    }
    +                } else {
    +                    throwUnknownField(currentFieldName, parser);
    +                }
    +            } else {
    +                throwUnknownToken(token, parser);
    +            }
    +        }
    +        return new BulkResponse(items.toArray(new BulkItemResponse[items.size()]), took, ingestTook);
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java
    index 1276f6c2db58b..70be3207486ec 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java
    @@ -139,12 +139,11 @@ void executeBulk(
                 }
     
                 @Override
    -            void createIndex(String index, TimeValue timeout, ActionListener listener) {
    +            void createIndex(String index, boolean requireDataStream, TimeValue timeout, ActionListener listener) {
                     try {
                         simulateAutoCreate.accept(index);
                         // If we try to create an index just immediately assume it worked
    -                    listener.onResponse(new CreateIndexResponse(true, true, index) {
    -                    });
    +                    listener.onResponse(new CreateIndexResponse(true, true, index));
                     } catch (Exception e) {
                         listener.onFailure(e);
                     }
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
    index f30bceada65d9..188adf396435f 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
    @@ -154,7 +154,7 @@ void executeBulk(
             }
     
             @Override
    -        void createIndex(String index, TimeValue timeout, ActionListener listener) {
    +        void createIndex(String index, boolean requireDataStream, TimeValue timeout, ActionListener listener) {
                 indexCreated = true;
                 listener.onResponse(null);
             }
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
    index a2e164f6a242c..c3a1747902893 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
    @@ -92,7 +92,7 @@ class TestTransportBulkAction extends TransportBulkAction {
             }
     
             @Override
    -        void createIndex(String index, TimeValue timeout, ActionListener listener) {
    +        void createIndex(String index, boolean requireDataStream, TimeValue timeout, ActionListener listener) {
                 indexCreated = true;
                 if (beforeIndexCreation != null) {
                     beforeIndexCreation.run();
    diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
    index 49dff864e7374..0a3adaf54a8ea 100644
    --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
    @@ -52,7 +52,9 @@
     
     public class TransportSimulateBulkActionTests extends ESTestCase {
     
    -    /** Services needed by bulk action */
    +    /**
    +     * Services needed by bulk action
    +     */
         private TransportService transportService;
         private ClusterService clusterService;
         private TestThreadPool threadPool;
    @@ -80,7 +82,7 @@ class TestTransportSimulateBulkAction extends TransportSimulateBulkAction {
             }
     
             @Override
    -        void createIndex(String index, TimeValue timeout, ActionListener listener) {
    +        void createIndex(String index, boolean requireDataStream, TimeValue timeout, ActionListener listener) {
                 indexCreated = true;
                 if (beforeIndexCreation != null) {
                     beforeIndexCreation.run();
    @@ -189,7 +191,7 @@ public void onFailure(Exception e) {
                     fail(e, "Unexpected error");
                 }
             };
    -        Set autoCreateIndices = Set.of(); // unused
    +        Map indicesToAutoCreate = Map.of(); // unused
             Set dataStreamsToRollover = Set.of(); // unused
             Map indicesThatCannotBeCreated = Map.of(); // unused
             long startTime = 0;
    @@ -198,7 +200,7 @@ public void onFailure(Exception e) {
                 bulkRequest,
                 randomAlphaOfLength(10),
                 listener,
    -            autoCreateIndices,
    +            indicesToAutoCreate,
                 dataStreamsToRollover,
                 indicesThatCannotBeCreated,
                 startTime
    diff --git a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java
    index d26b1c764ddb8..e7019a583b729 100644
    --- a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java
    @@ -48,7 +48,7 @@ public void testToXContent() throws IOException {
             {
                 DeleteResponse response = new DeleteResponse(new ShardId("index", "index_uuid", 0), "id", -1, 0, 7, true);
                 response.setForcedRefresh(true);
    -            response.setShardInfo(new ReplicationResponse.ShardInfo(10, 5));
    +            response.setShardInfo(ReplicationResponse.ShardInfo.of(10, 5));
                 String output = Strings.toString(response);
                 assertEquals(XContentHelper.stripWhitespace("""
                     {
    diff --git a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java
    index ab72bf17beca9..49697101b3234 100644
    --- a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java
    @@ -23,6 +23,7 @@
     
     import java.io.IOException;
     import java.util.Collections;
    +import java.util.Locale;
     import java.util.function.Predicate;
     
     import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
    @@ -69,7 +70,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws
             }
             GetResponse parsedGetResponse;
             try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
    -            parsedGetResponse = GetResponse.fromXContent(parser);
    +            parsedGetResponse = parseInstance(parser);
                 assertNull(parser.nextToken());
             }
             assertEquals(expectedGetResponse.getSourceAsMap(), parsedGetResponse.getSourceAsMap());
    @@ -172,7 +173,7 @@ public void testFromXContentThrowsParsingException() throws IOException {
             BytesReference originalBytes = toShuffledXContent(getResponse, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
     
             try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
    -            ParsingException exception = expectThrows(ParsingException.class, () -> GetResponse.fromXContent(parser));
    +            ParsingException exception = expectThrows(ParsingException.class, () -> parseInstance(parser));
                 assertEquals("Missing required fields [_index,_id]", exception.getMessage());
             }
         }
    @@ -184,4 +185,19 @@ private static GetResponse copyGetResponse(GetResponse getResponse) {
         private static GetResponse mutateGetResponse(GetResponse getResponse) {
             return new GetResponse(mutateGetResult(getResponse.getResult));
         }
    +
    +    private static GetResponse parseInstance(XContentParser parser) throws IOException {
    +        GetResult getResult = GetResult.fromXContent(parser);
    +
    +        // At this stage we ensure that we parsed enough information to return
    +        // a valid GetResponse instance. If it's not the case, we throw an
    +        // exception so that callers know it and can handle it correctly.
    +        if (getResult.getIndex() == null && getResult.getId() == null) {
    +            throw new ParsingException(
    +                parser.getTokenLocation(),
    +                String.format(Locale.ROOT, "Missing required fields [%s,%s]", GetResult._INDEX, GetResult._ID)
    +            );
    +        }
    +        return new GetResponse(getResult);
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java
    index b858456d78075..fd005c450e80f 100644
    --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java
    @@ -7,15 +7,21 @@
      */
     package org.elasticsearch.action.get;
     
    +import org.elasticsearch.ElasticsearchException;
     import org.elasticsearch.common.bytes.BytesReference;
    +import org.elasticsearch.common.logging.DeprecationLogger;
     import org.elasticsearch.index.get.GetResult;
    +import org.elasticsearch.rest.action.document.RestMultiGetAction;
     import org.elasticsearch.test.ESTestCase;
    +import org.elasticsearch.xcontent.ParseField;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentFactory;
     import org.elasticsearch.xcontent.XContentParser;
     import org.elasticsearch.xcontent.XContentType;
     
     import java.io.IOException;
    +import java.util.ArrayList;
    +import java.util.List;
     
     import static org.hamcrest.Matchers.containsString;
     import static org.hamcrest.Matchers.equalTo;
    @@ -23,6 +29,12 @@
     
     public class MultiGetResponseTests extends ESTestCase {
     
    +    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MultiGetResponse.class);
    +
    +    private static final ParseField TYPE = new ParseField("_type");
    +
    +    private static final ParseField ERROR = new ParseField("error");
    +
         public void testFromXContent() throws IOException {
             for (int runs = 0; runs < 20; runs++) {
                 MultiGetResponse expected = createTestInstance();
    @@ -30,7 +42,7 @@ public void testFromXContent() throws IOException {
                 BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false);
                 MultiGetResponse parsed;
                 try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) {
    -                parsed = MultiGetResponse.fromXContent(parser);
    +                parsed = parseInstance(parser);
                     assertNull(parser.nextToken());
                 }
                 assertNotSame(expected, parsed);
    @@ -77,4 +89,78 @@ private static MultiGetResponse createTestInstance() {
             return new MultiGetResponse(items);
         }
     
    +    public static MultiGetResponse parseInstance(XContentParser parser) throws IOException {
    +        String currentFieldName = null;
    +        List items = new ArrayList<>();
    +        for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
    +            switch (token) {
    +                case FIELD_NAME:
    +                    currentFieldName = parser.currentName();
    +                    break;
    +                case START_ARRAY:
    +                    if (MultiGetResponse.DOCS.getPreferredName().equals(currentFieldName)) {
    +                        for (token = parser.nextToken(); token != XContentParser.Token.END_ARRAY; token = parser.nextToken()) {
    +                            if (token == XContentParser.Token.START_OBJECT) {
    +                                items.add(parseItem(parser));
    +                            }
    +                        }
    +                    }
    +                    break;
    +                default:
     +                    // If unknown tokens are encountered then these should be ignored, because
    +                    // this is parsing logic on the client side.
    +                    break;
    +            }
    +        }
    +        return new MultiGetResponse(items.toArray(new MultiGetItemResponse[0]));
    +    }
    +
    +    private static MultiGetItemResponse parseItem(XContentParser parser) throws IOException {
    +        String currentFieldName = null;
    +        String index = null;
    +        String id = null;
    +        ElasticsearchException exception = null;
    +        GetResult getResult = null;
    +        for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
    +            switch (token) {
    +                case FIELD_NAME:
    +                    currentFieldName = parser.currentName();
    +                    if (MultiGetResponse.INDEX.match(currentFieldName, parser.getDeprecationHandler()) == false
    +                        && MultiGetResponse.ID.match(currentFieldName, parser.getDeprecationHandler()) == false
    +                        && ERROR.match(currentFieldName, parser.getDeprecationHandler()) == false) {
    +                        getResult = GetResult.fromXContentEmbedded(parser, index, id);
    +                    }
    +                    break;
    +                case VALUE_STRING:
    +                    if (MultiGetResponse.INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
    +                        index = parser.text();
    +                    } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
    +                        deprecationLogger.compatibleCritical("mget_with_types", RestMultiGetAction.TYPES_DEPRECATION_MESSAGE);
    +                    } else if (MultiGetResponse.ID.match(currentFieldName, parser.getDeprecationHandler())) {
    +                        id = parser.text();
    +                    }
    +                    break;
    +                case START_OBJECT:
    +                    if (ERROR.match(currentFieldName, parser.getDeprecationHandler())) {
    +                        exception = ElasticsearchException.fromXContent(parser);
    +                    }
    +                    break;
    +                default:
     +                    // If unknown tokens are encountered then these should be ignored, because
    +                    // this is parsing logic on the client side.
    +                    break;
    +            }
    +            if (getResult != null) {
    +                break;
    +            }
    +        }
    +
    +        if (exception != null) {
    +            return new MultiGetItemResponse(null, new MultiGetResponse.Failure(index, id, exception));
    +        } else {
    +            GetResponse getResponse = new GetResponse(getResult);
    +            return new MultiGetItemResponse(getResponse, null);
    +        }
    +    }
    +
     }
    diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java
    index e2f67d9387ff5..9af522524abc9 100644
    --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java
    @@ -53,16 +53,20 @@ public void testSetSource() throws Exception {
             indexRequestBuilder.setSource(source);
             assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true));
     
    +        indexRequestBuilder = new IndexRequestBuilder(this.testClient);
             indexRequestBuilder.setSource(source, XContentType.JSON);
             assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true));
     
    +        indexRequestBuilder = new IndexRequestBuilder(this.testClient);
             indexRequestBuilder.setSource("SomeKey", "SomeValue");
             assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true));
     
             // force the Object... setter
    +        indexRequestBuilder = new IndexRequestBuilder(this.testClient);
             indexRequestBuilder.setSource((Object) "SomeKey", "SomeValue");
             assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true));
     
    +        indexRequestBuilder = new IndexRequestBuilder(this.testClient);
             ByteArrayOutputStream docOut = new ByteArrayOutputStream();
             XContentBuilder doc = XContentFactory.jsonBuilder(docOut).startObject().field("SomeKey", "SomeValue").endObject();
             doc.close();
    @@ -72,6 +76,7 @@ public void testSetSource() throws Exception {
                 XContentHelper.convertToJson(indexRequestBuilder.request().source(), true, indexRequestBuilder.request().getContentType())
             );
     
    +        indexRequestBuilder = new IndexRequestBuilder(this.testClient);
             doc = XContentFactory.jsonBuilder().startObject().field("SomeKey", "SomeValue").endObject();
             doc.close();
             indexRequestBuilder.setSource(doc);
    diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
    index 38ffc6c46c3f3..df8aa6ce07b61 100644
    --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
    @@ -46,12 +46,10 @@
     import java.util.stream.Collectors;
     import java.util.stream.IntStream;
     
    -import static org.apache.lucene.tests.util.LuceneTestCase.expectThrows;
     import static org.hamcrest.Matchers.anEmptyMap;
     import static org.hamcrest.Matchers.containsString;
     import static org.hamcrest.Matchers.empty;
     import static org.hamcrest.Matchers.equalTo;
    -import static org.hamcrest.Matchers.in;
     import static org.hamcrest.Matchers.not;
     import static org.hamcrest.Matchers.notNullValue;
     
    @@ -136,7 +134,7 @@ public void testIndexResponse() {
             IndexResponse indexResponse = new IndexResponse(shardId, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, version, created);
             int total = randomIntBetween(1, 10);
             int successful = randomIntBetween(1, 10);
    -        ReplicationResponse.ShardInfo shardInfo = new ReplicationResponse.ShardInfo(total, successful);
    +        ReplicationResponse.ShardInfo shardInfo = ReplicationResponse.ShardInfo.of(total, successful);
             indexResponse.setShardInfo(shardInfo);
             boolean forcedRefresh = false;
             if (randomBoolean()) {
    @@ -463,6 +461,7 @@ public void testSerialization() throws IOException {
             assertThat(copy.ifSeqNo(), equalTo(indexRequest.ifSeqNo()));
             assertThat(copy.getFinalPipeline(), equalTo(indexRequest.getFinalPipeline()));
             assertThat(copy.ifPrimaryTerm(), equalTo(indexRequest.ifPrimaryTerm()));
    +        assertThat(copy.isRequireDataStream(), equalTo(indexRequest.isRequireDataStream()));
         }
     
         private IndexRequest createTestInstance() {
    @@ -470,6 +469,7 @@ private IndexRequest createTestInstance() {
             indexRequest.setPipeline(randomAlphaOfLength(15));
             indexRequest.setRequestId(randomLong());
             indexRequest.setRequireAlias(randomBoolean());
    +        indexRequest.setRequireDataStream(randomBoolean());
             indexRequest.setIfSeqNo(randomNonNegativeLong());
             indexRequest.setFinalPipeline(randomAlphaOfLength(20));
             indexRequest.setIfPrimaryTerm(randomNonNegativeLong());
    diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java
    index 9f97933759144..ea9e83021e781 100644
    --- a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java
    @@ -51,7 +51,7 @@ public void testToXContent() throws IOException {
             {
                 IndexResponse indexResponse = new IndexResponse(new ShardId("index", "index_uuid", 0), "id", -1, 17, 7, true);
                 indexResponse.setForcedRefresh(true);
    -            indexResponse.setShardInfo(new ReplicationResponse.ShardInfo(10, 5));
    +            indexResponse.setShardInfo(ReplicationResponse.ShardInfo.of(10, 5));
                 String output = Strings.toString(indexResponse);
                 assertEquals(XContentHelper.stripWhitespace("""
                     {
    diff --git a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java
    index 6f5841f3d2a03..c1ed3a670dffd 100644
    --- a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java
    @@ -26,6 +26,8 @@
     import java.util.List;
     import java.util.Map;
     
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
    +
     public class GetPipelineResponseTests extends AbstractXContentSerializingTestCase {
     
         private XContentBuilder getRandomXContentBuilder() throws IOException {
    @@ -69,7 +71,7 @@ public void testXContentDeserialization() throws IOException {
                     .xContent()
                     .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput())
             ) {
    -            parsedResponse = GetPipelineResponse.fromXContent(parser);
    +            parsedResponse = doParseInstance(parser);
             }
             List actualPipelines = response.pipelines();
             List parsedPipelines = parsedResponse.pipelines();
    @@ -82,7 +84,23 @@ public void testXContentDeserialization() throws IOException {
     
         @Override
         protected GetPipelineResponse doParseInstance(XContentParser parser) throws IOException {
    -        return GetPipelineResponse.fromXContent(parser);
    +        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
    +        List pipelines = new ArrayList<>();
    +        while (parser.nextToken().equals(XContentParser.Token.FIELD_NAME)) {
    +            String pipelineId = parser.currentName();
    +            parser.nextToken();
    +            try (XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent())) {
    +                contentBuilder.generator().copyCurrentStructure(parser);
    +                PipelineConfiguration pipeline = new PipelineConfiguration(
    +                    pipelineId,
    +                    BytesReference.bytes(contentBuilder),
    +                    contentBuilder.contentType()
    +                );
    +                pipelines.add(pipeline);
    +            }
    +        }
    +        ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser);
    +        return new GetPipelineResponse(pipelines);
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java
    index ebfeb310a916b..921637d06b982 100644
    --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java
    @@ -8,6 +8,8 @@
     package org.elasticsearch.action.ingest;
     
     import org.elasticsearch.test.AbstractXContentTestCase;
    +import org.elasticsearch.xcontent.ConstructingObjectParser;
    +import org.elasticsearch.xcontent.ParseField;
     import org.elasticsearch.xcontent.XContentParser;
     
     import java.io.IOException;
    @@ -17,8 +19,24 @@
     import java.util.function.Predicate;
     import java.util.function.Supplier;
     
    +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
    +
     public class SimulateDocumentVerboseResultTests extends AbstractXContentTestCase {
     
    +    @SuppressWarnings("unchecked")
    +    private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
    +        "simulate_document_verbose_result",
    +        true,
    +        a -> new SimulateDocumentVerboseResult((List) a[0])
    +    );
    +    static {
    +        PARSER.declareObjectArray(
    +            constructorArg(),
    +            SimulateProcessorResult.PARSER,
    +            new ParseField(SimulateDocumentVerboseResult.PROCESSOR_RESULT_FIELD)
    +        );
    +    }
    +
         static SimulateDocumentVerboseResult createTestInstance(boolean withFailures) {
             int numDocs = randomIntBetween(0, 5);
             List results = new ArrayList<>();
    @@ -42,7 +60,7 @@ protected SimulateDocumentVerboseResult createTestInstance() {
     
         @Override
         protected SimulateDocumentVerboseResult doParseInstance(XContentParser parser) {
    -        return SimulateDocumentVerboseResult.fromXContent(parser);
    +        return PARSER.apply(parser, null);
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
    index 4954406c14db0..1ec54638f9687 100644
    --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
    @@ -8,9 +8,12 @@
     
     package org.elasticsearch.action.ingest;
     
    +import org.elasticsearch.ElasticsearchException;
     import org.elasticsearch.common.io.stream.BytesStreamOutput;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.test.AbstractXContentTestCase;
    +import org.elasticsearch.xcontent.ConstructingObjectParser;
    +import org.elasticsearch.xcontent.ParseField;
     import org.elasticsearch.xcontent.XContentParser;
     
     import java.io.IOException;
    @@ -21,13 +24,65 @@
     import java.util.function.Predicate;
     import java.util.function.Supplier;
     
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
     import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
    +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
     import static org.hamcrest.CoreMatchers.equalTo;
     import static org.hamcrest.CoreMatchers.instanceOf;
     import static org.hamcrest.CoreMatchers.nullValue;
     
     public class SimulatePipelineResponseTests extends AbstractXContentTestCase {
     
    +    @SuppressWarnings("unchecked")
    +    private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
    +        "simulate_pipeline_response",
    +        true,
    +        a -> {
    +            List results = (List) a[0];
    +            boolean verbose = false;
    +            if (results.size() > 0) {
    +                if (results.get(0) instanceof SimulateDocumentVerboseResult) {
    +                    verbose = true;
    +                }
    +            }
    +            return new SimulatePipelineResponse(null, verbose, results);
    +        }
    +    );
    +    static {
    +        PARSER.declareObjectArray(constructorArg(), (parser, context) -> {
    +            XContentParser.Token token = parser.currentToken();
    +            ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
    +            SimulateDocumentResult result = null;
    +            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    +                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    +                String fieldName = parser.currentName();
    +                token = parser.nextToken();
    +                if (token == XContentParser.Token.START_ARRAY) {
    +                    if (fieldName.equals(SimulateDocumentVerboseResult.PROCESSOR_RESULT_FIELD)) {
    +                        List results = new ArrayList<>();
    +                        while ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) {
    +                            results.add(SimulateProcessorResult.fromXContent(parser));
    +                        }
    +                        ensureExpectedToken(XContentParser.Token.END_ARRAY, token, parser);
    +                        result = new SimulateDocumentVerboseResult(results);
    +                    } else {
    +                        parser.skipChildren();
    +                    }
    +                } else if (token.equals(XContentParser.Token.START_OBJECT)) {
    +                    switch (fieldName) {
    +                        case WriteableIngestDocument.DOC_FIELD -> result = new SimulateDocumentBaseResult(
    +                            WriteableIngestDocument.INGEST_DOC_PARSER.apply(parser, null).getIngestDocument()
    +                        );
    +                        case "error" -> result = new SimulateDocumentBaseResult(ElasticsearchException.fromXContent(parser));
    +                        default -> parser.skipChildren();
    +                    }
    +                } // else it is a value skip it
    +            }
    +            assert result != null;
    +            return result;
    +        }, new ParseField(SimulatePipelineResponse.Fields.DOCUMENTS));
    +    }
    +
         public void testSerialization() throws IOException {
             boolean isVerbose = randomBoolean();
             String id = randomBoolean() ? randomAlphaOfLengthBetween(1, 10) : null;
    @@ -118,7 +173,7 @@ protected SimulatePipelineResponse createTestInstance() {
     
         @Override
         protected SimulatePipelineResponse doParseInstance(XContentParser parser) {
    -        return SimulatePipelineResponse.fromXContent(parser);
    +        return PARSER.apply(parser, null);
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
    index bd6171e353add..607d83d4aab31 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
    @@ -75,6 +75,7 @@ private AbstractSearchAsyncAction createAction(
                 "test",
                 logger,
                 null,
    +            null,
                 nodeIdToConnection,
                 Collections.singletonMap("foo", AliasFilter.of(new MatchAllQueryBuilder())),
                 Collections.singletonMap("foo", 2.0f),
    @@ -133,8 +134,7 @@ public void testTookWithRealClock() {
     
         private void runTestTook(final boolean controlled) {
             final AtomicLong expected = new AtomicLong();
    -        var result = new ArraySearchPhaseResults<>(10);
    -        try {
    +        try (var result = new ArraySearchPhaseResults<>(10)) {
                 AbstractSearchAsyncAction action = createAction(new SearchRequest(), result, null, controlled, expected);
                 final long actual = action.buildTookInMillis();
                 if (controlled) {
    @@ -144,16 +144,13 @@ private void runTestTook(final boolean controlled) {
                     // with a real clock, the best we can say is that it took as long as we spun for
                     assertThat(actual, greaterThanOrEqualTo(TimeUnit.NANOSECONDS.toMillis(expected.get())));
                 }
    -        } finally {
    -            result.decRef();
             }
         }
     
         public void testBuildShardSearchTransportRequest() {
             SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(randomBoolean());
             final AtomicLong expected = new AtomicLong();
    -        var result = new ArraySearchPhaseResults<>(10);
    -        try {
    +        try (var result = new ArraySearchPhaseResults<>(10)) {
                 AbstractSearchAsyncAction action = createAction(searchRequest, result, null, false, expected);
                 String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
                 SearchShardIterator iterator = new SearchShardIterator(
    @@ -169,8 +166,6 @@ public void testBuildShardSearchTransportRequest() {
                 assertEquals(2.0f, shardSearchTransportRequest.indexBoost(), 0.0f);
                 assertArrayEquals(new String[] { "name", "name1" }, shardSearchTransportRequest.indices());
                 assertEquals(clusterAlias, shardSearchTransportRequest.getClusterAlias());
    -        } finally {
    -            result.decRef();
             }
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java
    index 33e6096bab763..4a7d0cc8208e2 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java
    @@ -41,8 +41,9 @@ public void testProgressListenerExceptionsAreCaught() throws Exception {
             );
             searchProgressListener.notifyListShards(searchShards, Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, timeProvider);
     
    -        CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(searchProgressListener, 10);
    -        try {
    +        try (
    +            CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(searchProgressListener, 10)
    +        ) {
                 AtomicInteger nextCounter = new AtomicInteger(0);
                 for (int i = 0; i < 10; i++) {
                     SearchShardTarget searchShardTarget = new SearchShardTarget("node", new ShardId("index", "uuid", i), null);
    @@ -58,14 +59,16 @@ public void testProgressListenerExceptionsAreCaught() throws Exception {
                 queryPhaseResultConsumer.reduce();
                 assertEquals(1, searchProgressListener.onFinalReduce.get());
                 assertEquals(10, nextCounter.get());
    -        } finally {
    -            queryPhaseResultConsumer.decRef();
             }
         }
     
         public void testNullShardResultHandling() throws Exception {
    -        CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(SearchProgressListener.NOOP, 10);
    -        try {
    +        try (
    +            CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(
    +                SearchProgressListener.NOOP,
    +                10
    +            )
    +        ) {
                 AtomicInteger nextCounter = new AtomicInteger(0);
                 for (int i = 0; i < 10; i++) {
                     SearchShardTarget searchShardTarget = new SearchShardTarget("node", new ShardId("index", "uuid", i), null);
    @@ -79,20 +82,20 @@ public void testNullShardResultHandling() throws Exception {
                 assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation);
                 assertFalse(reducePhase.isEmptyResult());
                 assertEquals(10, nextCounter.get());
    -        } finally {
    -            queryPhaseResultConsumer.decRef();
             }
         }
     
         public void testEmptyResults() throws Exception {
    -        CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(SearchProgressListener.NOOP, 10);
    -        try {
    +        try (
    +            CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(
    +                SearchProgressListener.NOOP,
    +                10
    +            )
    +        ) {
                 var reducePhase = queryPhaseResultConsumer.reduce();
                 assertEquals(0, reducePhase.totalHits().value);
                 assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation);
                 assertTrue(reducePhase.isEmptyResult());
    -        } finally {
    -            queryPhaseResultConsumer.decRef();
             }
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java
    index 838e13d6026c7..bc31f5f92f9b5 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java
    @@ -23,8 +23,7 @@
     
     public class CountedCollectorTests extends ESTestCase {
         public void testCollect() throws InterruptedException {
    -        ArraySearchPhaseResults consumer = new ArraySearchPhaseResults<>(randomIntBetween(1, 100));
    -        try {
    +        try (ArraySearchPhaseResults consumer = new ArraySearchPhaseResults<>(randomIntBetween(1, 100))) {
                 List state = new ArrayList<>();
                 int numResultsExpected = randomIntBetween(1, consumer.getAtomicArray().length());
                 MockSearchPhaseContext context = new MockSearchPhaseContext(consumer.getAtomicArray().length());
    @@ -93,8 +92,6 @@ public void testCollect() throws InterruptedException {
                 for (int i = numResultsExpected; i < results.length(); i++) {
                     assertNull("index: " + i, results.get(i));
                 }
    -        } finally {
    -            consumer.decRef();
             }
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java
    index b14d24cf95f62..e9ff8336ef4c9 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java
    @@ -125,16 +125,17 @@ public void sendExecuteQuery(
             SearchPhaseController searchPhaseController = searchPhaseController();
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
             mockSearchPhaseContext.searchTransport = searchTransportService;
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.searchRequest,
    -            results.length(),
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.searchRequest,
    +                results.length(),
    +                exc -> {}
    +            )
    +        ) {
                 DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") {
                     @Override
                     public void run() throws IOException {
    @@ -155,8 +156,6 @@ public void run() throws IOException {
                 assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc);
                 assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
                 assertEquals(2, mockSearchPhaseContext.numSuccess.get());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -211,16 +210,17 @@ public void sendExecuteQuery(
             SearchPhaseController searchPhaseController = searchPhaseController();
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
             mockSearchPhaseContext.searchTransport = searchTransportService;
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.searchRequest,
    -            results.length(),
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.searchRequest,
    +                results.length(),
    +                exc -> {}
    +            )
    +        ) {
                 DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") {
                     @Override
                     public void run() throws IOException {
    @@ -243,8 +243,6 @@ public void run() throws IOException {
                 assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
                 assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(new ShardSearchContextId("", 2L)));
                 assertNull(responseRef.get().get(1));
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -299,16 +297,17 @@ public void sendExecuteQuery(
             SearchPhaseController searchPhaseController = searchPhaseController();
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
             mockSearchPhaseContext.searchTransport = searchTransportService;
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.searchRequest,
    -            results.length(),
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.searchRequest,
    +                results.length(),
    +                exc -> {}
    +            )
    +        ) {
                 DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") {
                     @Override
                     public void run() throws IOException {
    @@ -320,8 +319,6 @@ public void run() throws IOException {
                 assertThat(mockSearchPhaseContext.failures, hasSize(1));
                 assertThat(mockSearchPhaseContext.failures.get(0).getCause(), instanceOf(UncheckedIOException.class));
                 assertThat(mockSearchPhaseContext.releasedSearchContexts, hasSize(1)); // phase execution will clean up on the contexts
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -339,15 +336,28 @@ public void testRewriteShardSearchRequestWithRank() {
     
             QueryBuilder bm25 = new TermQueryBuilder("field", "term");
             SearchSourceBuilder ssb = new SearchSourceBuilder().query(bm25)
    -            .knnSearch(List.of(new KnnSearchBuilder("vector", new float[] { 0.0f }, 10, 100, null)))
    +            .knnSearch(
    +                List.of(
    +                    new KnnSearchBuilder("vector", new float[] { 0.0f }, 10, 100, null),
    +                    new KnnSearchBuilder("vector2", new float[] { 0.0f }, 10, 100, null)
    +                )
    +            )
                 .rankBuilder(new TestRankBuilder(100));
             SearchRequest sr = new SearchRequest().allowPartialSearchResults(true).source(ssb);
             ShardSearchRequest ssr = new ShardSearchRequest(null, sr, new ShardId("test", "testuuid", 1), 1, 1, null, 1.0f, 0, null);
     
             dqp.rewriteShardSearchRequest(ssr);
     
    -        KnnScoreDocQueryBuilder ksdqb0 = new KnnScoreDocQueryBuilder(new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1) });
    -        KnnScoreDocQueryBuilder ksdqb1 = new KnnScoreDocQueryBuilder(new ScoreDoc[] { new ScoreDoc(1, 2.0f, 1) });
    +        KnnScoreDocQueryBuilder ksdqb0 = new KnnScoreDocQueryBuilder(
    +            new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1) },
    +            "vector",
    +            new float[] { 0.0f }
    +        );
    +        KnnScoreDocQueryBuilder ksdqb1 = new KnnScoreDocQueryBuilder(
    +            new ScoreDoc[] { new ScoreDoc(1, 2.0f, 1) },
    +            "vector2",
    +            new float[] { 0.0f }
    +        );
             assertEquals(
                 List.of(bm25, ksdqb0, ksdqb1),
                 List.of(
    diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
    index 648cb8aa60158..0a98b12444f9c 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
    @@ -42,8 +42,8 @@ public void testCollapseSingleHit() throws IOException {
                 final int numInnerHits = randomIntBetween(1, 5);
                 List collapsedHits = new ArrayList<>(numInnerHits);
                 for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
    -                SearchHits hits = new SearchHits(
    -                    new SearchHit[] { new SearchHit(innerHitNum, "ID"), new SearchHit(innerHitNum + 1, "ID") },
    +                SearchHits hits = SearchHits.unpooled(
    +                    new SearchHit[] { SearchHit.unpooled(innerHitNum, "ID"), SearchHit.unpooled(innerHitNum + 1, "ID") },
                         new TotalHits(2, TotalHits.Relation.EQUAL_TO),
                         1.0F
                     );
    @@ -98,6 +98,8 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
                                     sections.decRef();
                                 }
                                 mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null));
    +                            // transferring ownership to the multi-search response so no need to release here
    +                            mockSearchPhaseContext.searchResponse.set(null);
                             }
     
                             ActionListener.respondAndRelease(
    @@ -110,37 +112,43 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
                     SearchHit hit = new SearchHit(1, "ID");
                     hit.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(collapseValue)));
                     SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
    -                ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
    -                    @Override
    -                    public void run() {
    -                        var sections = new SearchResponseSections(hits, null, null, false, null, null, 1);
    -                        try {
    -                            mockSearchPhaseContext.sendSearchResponse(sections, null);
    -                        } finally {
    -                            sections.decRef();
    +                try {
    +                    ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
    +                        @Override
    +                        public void run() {
    +                            var sections = new SearchResponseSections(hits, null, null, false, null, null, 1);
    +                            try {
    +                                mockSearchPhaseContext.sendSearchResponse(sections, null);
    +                            } finally {
    +                                sections.decRef();
    +                            }
                             }
    -                    }
    -                });
    +                    });
     
    -                phase.run();
    -                mockSearchPhaseContext.assertNoFailure();
    -                SearchResponse theResponse = mockSearchPhaseContext.searchResponse.get();
    -                assertNotNull(theResponse);
    -                assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size());
    +                    phase.run();
    +                    mockSearchPhaseContext.assertNoFailure();
    +                    SearchResponse theResponse = mockSearchPhaseContext.searchResponse.get();
    +                    assertNotNull(theResponse);
    +                    assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size());
     
    -                for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
    -                    assertSame(
    -                        theResponse.getHits().getHits()[0].getInnerHits().get("innerHit" + innerHitNum),
    -                        collapsedHits.get(innerHitNum)
    -                    );
    -                }
    +                    for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
    +                        assertSame(
    +                            theResponse.getHits().getHits()[0].getInnerHits().get("innerHit" + innerHitNum),
    +                            collapsedHits.get(innerHitNum)
    +                        );
    +                    }
     
    -                assertTrue(executedMultiSearch.get());
    +                    assertTrue(executedMultiSearch.get());
    +                } finally {
    +                    hits.decRef();
    +                }
                 } finally {
    +                mockSearchPhaseContext.execute(() -> {});
                     var resp = mockSearchPhaseContext.searchResponse.get();
                     if (resp != null) {
                         resp.decRef();
                     }
    +
                 }
             }
         }
    @@ -198,22 +206,28 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
             SearchHit hit2 = new SearchHit(2, "ID2");
             hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(collapseValue)));
             SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
    -        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
    -            @Override
    -            public void run() {
    -                var sections = new SearchResponseSections(hits, null, null, false, null, null, 1);
    -                try {
    -                    mockSearchPhaseContext.sendSearchResponse(sections, null);
    -                } finally {
    -                    sections.decRef();
    +        try {
    +            ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
    +                @Override
    +                public void run() {
    +                    var sections = new SearchResponseSections(hits, null, null, false, null, null, 1);
    +                    try {
    +                        mockSearchPhaseContext.sendSearchResponse(sections, null);
    +                    } finally {
    +                        sections.decRef();
    +                    }
                     }
    -            }
    -        });
    -        phase.run();
    -        assertThat(mockSearchPhaseContext.phaseFailure.get(), Matchers.instanceOf(RuntimeException.class));
    -        assertEquals("boom", mockSearchPhaseContext.phaseFailure.get().getMessage());
    -        assertNotNull(mockSearchPhaseContext.phaseFailure.get());
    -        assertNull(mockSearchPhaseContext.searchResponse.get());
    +            });
    +            phase.run();
    +            assertThat(mockSearchPhaseContext.phaseFailure.get(), Matchers.instanceOf(RuntimeException.class));
    +            assertEquals("boom", mockSearchPhaseContext.phaseFailure.get().getMessage());
    +            assertNotNull(mockSearchPhaseContext.phaseFailure.get());
    +            assertNull(mockSearchPhaseContext.searchResponse.get());
    +        } finally {
    +            mockSearchPhaseContext.execute(() -> {});
    +            hits.decRef();
    +            collapsedHits.decRef();
    +        }
         }
     
         public void testSkipPhase() throws IOException {
    @@ -231,21 +245,26 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
                 SearchHit hit2 = new SearchHit(2, "ID2");
                 hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(null)));
                 SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
    -            ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
    -                @Override
    -                public void run() {
    -                    var sections = new SearchResponseSections(hits, null, null, false, null, null, 1);
    -                    try {
    -                        mockSearchPhaseContext.sendSearchResponse(sections, null);
    -                    } finally {
    -                        sections.decRef();
    +            try {
    +                ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
    +                    @Override
    +                    public void run() {
    +                        var sections = new SearchResponseSections(hits, null, null, false, null, null, 1);
    +                        try {
    +                            mockSearchPhaseContext.sendSearchResponse(sections, null);
    +                        } finally {
    +                            sections.decRef();
    +                        }
                         }
    -                }
    -            });
    -            phase.run();
    -            mockSearchPhaseContext.assertNoFailure();
    -            assertNotNull(mockSearchPhaseContext.searchResponse.get());
    +                });
    +                phase.run();
    +                mockSearchPhaseContext.assertNoFailure();
    +                assertNotNull(mockSearchPhaseContext.searchResponse.get());
    +            } finally {
    +                hits.decRef();
    +            }
             } finally {
    +            mockSearchPhaseContext.execute(() -> {});
                 var resp = mockSearchPhaseContext.searchResponse.get();
                 if (resp != null) {
                     resp.decRef();
    @@ -328,6 +347,7 @@ public void run() {
                 phase.run();
                 mockSearchPhaseContext.assertNoFailure();
                 assertNotNull(mockSearchPhaseContext.searchResponse.get());
    +            mockSearchPhaseContext.execute(() -> {});
             } finally {
                 var resp = mockSearchPhaseContext.searchResponse.get();
                 if (resp != null) {
    diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java
    index 035d01108d655..95a4efcca5fa2 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java
    @@ -52,9 +52,11 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
                     phase.run();
                 } finally {
                     sections.decRef();
    +                hits.decRef();
                 }
                 searchPhaseContext.assertNoFailure();
                 assertNotNull(searchPhaseContext.searchResponse.get());
    +            searchPhaseContext.execute(() -> {});
             } finally {
                 var resp = searchPhaseContext.searchResponse.get();
                 if (resp != null) {
    @@ -126,6 +128,7 @@ void sendExecuteMultiSearch(
                                 ),
                                 null
                             );
    +                        searchHits.decRef();
                         }
                         ActionListener.respondAndRelease(listener, new MultiSearchResponse(responses, randomNonNegativeLong()));
                     }
    @@ -192,6 +195,7 @@ void sendExecuteMultiSearch(
                     phase.run();
                 } finally {
                     sections.decRef();
    +                searchHits.decRef();
                 }
                 assertTrue(requestSent.get());
                 searchPhaseContext.assertNoFailure();
    @@ -220,6 +224,7 @@ void sendExecuteMultiSearch(
                     leftHit1.field("lookup_field_3").getValues(),
                     contains(Map.of("field_a", List.of("a2"), "field_b", List.of("b1", "b2")))
                 );
    +            searchPhaseContext.execute(() -> {});
             } finally {
                 var resp = searchPhaseContext.searchResponse.get();
                 if (resp != null) {
    diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
    index 4594810da575a..a2c5bed51f5e7 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
    @@ -50,16 +50,17 @@ public class FetchSearchPhaseTests extends ESTestCase {
         public void testShortcutQueryAndFetchOptimization() {
             SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder());
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1);
    -        SearchPhaseResults results = controller.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.getRequest(),
    -            1,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults results = controller.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.getRequest(),
    +                1,
    +                exc -> {}
    +            )
    +        ) {
                 boolean hasHits = randomBoolean();
                 boolean profiled = hasHits && randomBoolean();
                 final int numHits;
    @@ -78,8 +79,8 @@ public void testShortcutQueryAndFetchOptimization() {
                     FetchSearchResult fetchResult = new FetchSearchResult();
                     try {
                         fetchResult.setSearchShardTarget(queryResult.getSearchShardTarget());
    -                    SearchHits hits = new SearchHits(
    -                        new SearchHit[] { new SearchHit(42) },
    +                    SearchHits hits = SearchHits.unpooled(
    +                        new SearchHit[] { SearchHit.unpooled(42) },
                             new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                             1.0F
                         );
    @@ -126,7 +127,6 @@ public void run() {
                 if (resp != null) {
                     resp.decRef();
                 }
    -            results.decRef();
             }
         }
     
    @@ -144,16 +144,17 @@ private void assertProfiles(boolean profiled, int totalShards, SearchResponse se
         public void testFetchTwoDocument() {
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
             SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder());
    -        SearchPhaseResults results = controller.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.getRequest(),
    -            2,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults results = controller.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.getRequest(),
    +                2,
    +                exc -> {}
    +            )
    +        ) {
                 int resultSetSize = randomIntBetween(2, 10);
                 boolean profiled = randomBoolean();
     
    @@ -209,16 +210,16 @@ public void sendExecuteFetch(
                             SearchHits hits;
                             if (request.contextId().equals(ctx2)) {
                                 fetchResult.setSearchShardTarget(shard2Target);
    -                            hits = new SearchHits(
    -                                new SearchHit[] { new SearchHit(84) },
    +                            hits = SearchHits.unpooled(
    +                                new SearchHit[] { SearchHit.unpooled(84) },
                                     new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                                     2.0F
                                 );
                             } else {
                                 assertEquals(ctx1, request.contextId());
                                 fetchResult.setSearchShardTarget(shard1Target);
    -                            hits = new SearchHits(
    -                                new SearchHit[] { new SearchHit(42) },
    +                            hits = SearchHits.unpooled(
    +                                new SearchHit[] { SearchHit.unpooled(42) },
                                     new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                                     1.0F
                                 );
    @@ -258,23 +259,23 @@ public void run() {
                 if (resp != null) {
                     resp.decRef();
                 }
    -            results.decRef();
             }
         }
     
         public void testFailFetchOneDoc() {
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
             SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder());
    -        SearchPhaseResults results = controller.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.getRequest(),
    -            2,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults results = controller.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.getRequest(),
    +                2,
    +                exc -> {}
    +            )
    +        ) {
                 int resultSetSize = randomIntBetween(2, 10);
                 boolean profiled = randomBoolean();
     
    @@ -327,8 +328,8 @@ public void sendExecuteFetch(
                             FetchSearchResult fetchResult = new FetchSearchResult();
                             try {
                                 fetchResult.setSearchShardTarget(shard1Target);
    -                            SearchHits hits = new SearchHits(
    -                                new SearchHit[] { new SearchHit(84) },
    +                            SearchHits hits = SearchHits.unpooled(
    +                                new SearchHit[] { SearchHit.unpooled(84) },
                                     new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                                     2.0F
                                 );
    @@ -386,7 +387,6 @@ public void run() {
                 if (resp != null) {
                     resp.decRef();
                 }
    -            results.decRef();
             }
         }
     
    @@ -397,16 +397,17 @@ public void testFetchDocsConcurrently() throws InterruptedException {
             boolean profiled = randomBoolean();
             SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder());
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits);
    -        SearchPhaseResults results = controller.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.getRequest(),
    -            numHits,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults results = controller.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.getRequest(),
    +                numHits,
    +                exc -> {}
    +            )
    +        ) {
                 SearchShardTarget[] shardTargets = new SearchShardTarget[numHits];
                 for (int i = 0; i < numHits; i++) {
                     shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null);
    @@ -439,8 +440,8 @@ public void sendExecuteFetch(
                             FetchSearchResult fetchResult = new FetchSearchResult();
                             try {
                                 fetchResult.setSearchShardTarget(shardTargets[(int) request.contextId().getId()]);
    -                            SearchHits hits = new SearchHits(
    -                                new SearchHit[] { new SearchHit((int) (request.contextId().getId() + 1)) },
    +                            SearchHits hits = SearchHits.unpooled(
    +                                new SearchHit[] { SearchHit.unpooled((int) (request.contextId().getId() + 1)) },
                                     new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                                     100F
                                 );
    @@ -505,23 +506,23 @@ public void run() {
                 if (resp != null) {
                     resp.decRef();
                 }
    -            results.decRef();
             }
         }
     
         public void testExceptionFailsPhase() {
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
             SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder());
    -        SearchPhaseResults results = controller.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.getRequest(),
    -            2,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults results = controller.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.getRequest(),
    +                2,
    +                exc -> {}
    +            )
    +        ) {
                 int resultSetSize = randomIntBetween(2, 10);
                 boolean profiled = randomBoolean();
     
    @@ -578,16 +579,16 @@ public void sendExecuteFetch(
                             SearchHits hits;
                             if (request.contextId().getId() == 321) {
                                 fetchResult.setSearchShardTarget(shard2Target);
    -                            hits = new SearchHits(
    -                                new SearchHit[] { new SearchHit(84) },
    +                            hits = SearchHits.unpooled(
    +                                new SearchHit[] { SearchHit.unpooled(84) },
                                     new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                                     2.0F
                                 );
                             } else {
                                 fetchResult.setSearchShardTarget(shard1Target);
                                 assertEquals(request, 123);
    -                            hits = new SearchHits(
    -                                new SearchHit[] { new SearchHit(42) },
    +                            hits = SearchHits.unpooled(
    +                                new SearchHit[] { SearchHit.unpooled(42) },
                                     new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                                     1.0F
                                 );
    @@ -620,23 +621,23 @@ public void run() {
                 if (resp != null) {
                     resp.decRef();
                 }
    -            results.decRef();
             }
         }
     
         public void testCleanupIrrelevantContexts() { // contexts that are not fetched should be cleaned up
             MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
             SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder());
    -        SearchPhaseResults results = controller.newSearchPhaseResults(
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            mockSearchPhaseContext.getRequest(),
    -            2,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults results = controller.newSearchPhaseResults(
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                mockSearchPhaseContext.getRequest(),
    +                2,
    +                exc -> {}
    +            )
    +        ) {
                 int resultSetSize = 1;
                 boolean profiled = randomBoolean();
     
    @@ -689,8 +690,8 @@ public void sendExecuteFetch(
                         try {
                             if (request.contextId().getId() == 321) {
                                 fetchResult.setSearchShardTarget(shard1Target);
    -                            SearchHits hits = new SearchHits(
    -                                new SearchHit[] { new SearchHit(84) },
    +                            SearchHits hits = SearchHits.unpooled(
    +                                new SearchHit[] { SearchHit.unpooled(84) },
                                     new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                                     2.0F
                                 );
    @@ -740,7 +741,6 @@ public void run() {
                 if (resp != null) {
                     resp.decRef();
                 }
    -            results.decRef();
             }
     
         }
    diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
    index 1a510058e3bbd..ed807091ae9a2 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
    @@ -98,6 +98,8 @@ public void sendSearchResponse(SearchResponseSections internalSearchResponse, At
                     searchContextId
                 )
             );
    +        Releasables.close(releasables);
    +        releasables.clear();
             if (existing != null) {
                 existing.decRef();
             }
    @@ -147,12 +149,7 @@ public void addReleasable(Releasable releasable) {
     
         @Override
         public void execute(Runnable command) {
    -        try {
    -            command.run();
    -        } finally {
    -            Releasables.close(releasables);
    -            releasables.clear();
    -        }
    +        command.run();
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java
    index 7e1e7de03e288..91bf1059225d8 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java
    @@ -96,7 +96,7 @@ protected OpenPointInTimeRequest mutateInstance(OpenPointInTimeRequest in) throw
         }
     
         public void testUseDefaultConcurrentForOldVersion() throws Exception {
    -        TransportVersion previousVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_500_020);
    +        TransportVersion previousVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_9_X);
             try (BytesStreamOutput output = new BytesStreamOutput()) {
                 TransportVersion version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_0_0, previousVersion);
                 output.setTransportVersion(version);
    diff --git a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java
    index 6035950ca4635..db32213ff97b7 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java
    @@ -103,20 +103,21 @@ public void testProgressListenerExceptionsAreCaught() throws Exception {
             SearchRequest searchRequest = new SearchRequest("index");
             searchRequest.setBatchedReduceSize(2);
             AtomicReference onPartialMergeFailure = new AtomicReference<>();
    -        QueryPhaseResultConsumer queryPhaseResultConsumer = new QueryPhaseResultConsumer(
    -            searchRequest,
    -            executor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            searchPhaseController,
    -            () -> false,
    -            searchProgressListener,
    -            10,
    -            e -> onPartialMergeFailure.accumulateAndGet(e, (prev, curr) -> {
    -                curr.addSuppressed(prev);
    -                return curr;
    -            })
    -        );
    -        try {
    +        try (
    +            QueryPhaseResultConsumer queryPhaseResultConsumer = new QueryPhaseResultConsumer(
    +                searchRequest,
    +                executor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                searchPhaseController,
    +                () -> false,
    +                searchProgressListener,
    +                10,
    +                e -> onPartialMergeFailure.accumulateAndGet(e, (prev, curr) -> {
    +                    curr.addSuppressed(prev);
    +                    return curr;
    +                })
    +            )
    +        ) {
     
                 CountDownLatch partialReduceLatch = new CountDownLatch(10);
     
    @@ -137,8 +138,6 @@ public void testProgressListenerExceptionsAreCaught() throws Exception {
     
                 queryPhaseResultConsumer.reduce();
                 assertEquals(1, searchProgressListener.onFinalReduce.get());
    -        } finally {
    -            queryPhaseResultConsumer.decRef();
             }
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
    index d6b1bd8057708..cb41a03216dc5 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
    @@ -95,6 +95,7 @@ public void testSkipSearchShards() throws InterruptedException {
             AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction(
                 "test",
                 logger,
    +            null,
                 transportService,
                 (cluster, node) -> {
                     assert cluster == null : "cluster was not null: " + cluster;
    @@ -198,11 +199,11 @@ public void testLimitConcurrentShardRequests() throws InterruptedException {
             Map aliasFilters = Collections.singletonMap("_na_", AliasFilter.EMPTY);
             CountDownLatch awaitInitialRequests = new CountDownLatch(1);
             AtomicInteger numRequests = new AtomicInteger(0);
    -        var results = new ArraySearchPhaseResults(shardsIter.size());
    -        try {
    +        try (var results = new ArraySearchPhaseResults(shardsIter.size())) {
                 AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                     "test",
                     logger,
    +                null,
                     transportService,
                     (cluster, node) -> {
                         assert cluster == null : "cluster was not null: " + cluster;
    @@ -269,8 +270,6 @@ public void run() {
                 latch.await();
                 assertTrue(searchPhaseDidRun.get());
                 assertEquals(numShards, numRequests.get());
    -        } finally {
    -            results.decRef();
             }
         }
     
    @@ -312,12 +311,12 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI
             ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors()));
             final CountDownLatch latch = new CountDownLatch(1);
             final AtomicBoolean latchTriggered = new AtomicBoolean();
    -        var results = new ArraySearchPhaseResults(shardsIter.size());
             final TestSearchResponse testResponse = new TestSearchResponse();
    -        try {
    +        try (var results = new ArraySearchPhaseResults(shardsIter.size())) {
                 AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                     "test",
                     logger,
    +                null,
                     transportService,
                     (cluster, node) -> {
                         assert cluster == null : "cluster was not null: " + cluster;
    @@ -392,7 +391,6 @@ public void run() {
                 assertThat(runnables, equalTo(Collections.emptyList()));
             } finally {
                 testResponse.decRef();
    -            results.decRef();
             }
         }
     
    @@ -443,6 +441,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI
                 AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                     "test",
                     logger,
    +                null,
                     transportService,
                     (cluster, node) -> {
                         assert cluster == null : "cluster was not null: " + cluster;
    @@ -546,11 +545,11 @@ public void testAllowPartialResults() throws InterruptedException {
             Map aliasFilters = Collections.singletonMap("_na_", AliasFilter.EMPTY);
             AtomicInteger numRequests = new AtomicInteger(0);
             AtomicInteger numFailReplicas = new AtomicInteger(0);
    -        var results = new ArraySearchPhaseResults(shardsIter.size());
    -        try {
    +        try (var results = new ArraySearchPhaseResults(shardsIter.size())) {
                 AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                     "test",
                     logger,
    +                null,
                     transportService,
                     (cluster, node) -> {
                         assert cluster == null : "cluster was not null: " + cluster;
    @@ -615,8 +614,6 @@ public void run() {
                 assertTrue(searchPhaseDidRun.get());
                 assertEquals(numShards, numRequests.get());
                 assertThat(numFailReplicas.get(), greaterThanOrEqualTo(1));
    -        } finally {
    -            results.decRef();
             }
         }
     
    @@ -652,6 +649,7 @@ public void testSkipUnavailableSearchShards() throws InterruptedException {
             AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>(
                 "test",
                 logger,
    +            null,
                 new SearchTransportService(null, null, null),
                 (cluster, node) -> {
                     assert cluster == null : "cluster was not null: " + cluster;
    diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
    index ac88f999adef6..1f81ad2a02e8c 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
    @@ -558,7 +558,7 @@ private static AtomicArray generateFetchResults(
                 List searchHits = new ArrayList<>();
                 for (ScoreDoc scoreDoc : mergedSearchDocs) {
                     if (scoreDoc.shardIndex == shardIndex) {
    -                    searchHits.add(new SearchHit(scoreDoc.doc, ""));
    +                    searchHits.add(SearchHit.unpooled(scoreDoc.doc, ""));
                         if (scoreDoc.score > maxScore) {
                             maxScore = scoreDoc.score;
                         }
    @@ -570,7 +570,7 @@ private static AtomicArray generateFetchResults(
                             for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) {
                                 ScoreDoc doc = option.getDoc();
                                 if (doc.shardIndex == shardIndex) {
    -                                searchHits.add(new SearchHit(doc.doc, ""));
    +                                searchHits.add(SearchHit.unpooled(doc.doc, ""));
                                     if (doc.score > maxScore) {
                                         maxScore = doc.score;
                                     }
    @@ -583,7 +583,10 @@ private static AtomicArray generateFetchResults(
                 ProfileResult profileResult = profile && searchHits.size() > 0
                     ? new ProfileResult("fetch", "fetch", Map.of(), Map.of(), randomNonNegativeLong(), List.of())
                     : null;
    -            fetchSearchResult.shardResult(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore), profileResult);
    +            fetchSearchResult.shardResult(
    +                SearchHits.unpooled(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore),
    +                profileResult
    +            );
                 fetchResults.set(shardIndex, fetchSearchResult);
             }
             return fetchResults;
    @@ -610,16 +613,17 @@ private void consumerTestCase(int numEmptyResponses) throws Exception {
             SearchRequest request = randomSearchRequest();
             request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test")));
             request.setBatchedReduceSize(bufferSize);
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            3 + numEmptyResponses,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                3 + numEmptyResponses,
    +                exc -> {}
    +            )
    +        ) {
                 if (numEmptyResponses == 0) {
                     assertEquals(0, reductions.size());
                 }
    @@ -723,8 +727,6 @@ private void consumerTestCase(int numEmptyResponses) throws Exception {
                 assertNull(reduce.sortedTopDocs().sortFields());
                 assertNull(reduce.sortedTopDocs().collapseField());
                 assertNull(reduce.sortedTopDocs().collapseValues());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -735,16 +737,17 @@ public void testConsumerConcurrently() throws Exception {
             SearchRequest request = randomSearchRequest();
             request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test")));
             request.setBatchedReduceSize(bufferSize);
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            expectedNumResults,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                expectedNumResults,
    +                exc -> {}
    +            )
    +        ) {
                 AtomicInteger max = new AtomicInteger();
                 Thread[] threads = new Thread[expectedNumResults];
                 CountDownLatch latch = new CountDownLatch(expectedNumResults);
    @@ -797,8 +800,6 @@ public void testConsumerConcurrently() throws Exception {
                 assertNull(reduce.sortedTopDocs().sortFields());
                 assertNull(reduce.sortedTopDocs().collapseField());
                 assertNull(reduce.sortedTopDocs().collapseValues());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -808,16 +809,17 @@ public void testConsumerOnlyAggs() throws Exception {
             SearchRequest request = randomSearchRequest();
             request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test")).size(0));
             request.setBatchedReduceSize(bufferSize);
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            expectedNumResults,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                expectedNumResults,
    +                exc -> {}
    +            )
    +        ) {
                 AtomicInteger max = new AtomicInteger();
                 CountDownLatch latch = new CountDownLatch(expectedNumResults);
                 for (int i = 0; i < expectedNumResults; i++) {
    @@ -857,8 +859,6 @@ public void testConsumerOnlyAggs() throws Exception {
                 assertNull(reduce.sortedTopDocs().sortFields());
                 assertNull(reduce.sortedTopDocs().collapseField());
                 assertNull(reduce.sortedTopDocs().collapseValues());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -870,16 +870,18 @@ public void testConsumerOnlyHits() throws Exception {
                 request.source(new SearchSourceBuilder().size(randomIntBetween(1, 10)));
             }
             request.setBatchedReduceSize(bufferSize);
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            expectedNumResults,
    -            exc -> {}
    -        );
    -        try {
    +
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                expectedNumResults,
    +                exc -> {}
    +            )
    +        ) {
                 AtomicInteger max = new AtomicInteger();
                 CountDownLatch latch = new CountDownLatch(expectedNumResults);
                 for (int i = 0; i < expectedNumResults; i++) {
    @@ -916,8 +918,6 @@ public void testConsumerOnlyHits() throws Exception {
                 assertNull(reduce.sortedTopDocs().sortFields());
                 assertNull(reduce.sortedTopDocs().collapseField());
                 assertNull(reduce.sortedTopDocs().collapseValues());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -937,16 +937,17 @@ public void testReduceTopNWithFromOffset() throws Exception {
             SearchRequest request = new SearchRequest();
             request.source(new SearchSourceBuilder().size(5).from(5));
             request.setBatchedReduceSize(randomIntBetween(2, 4));
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            4,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                4,
    +                exc -> {}
    +            )
    +        ) {
                 int score = 100;
                 CountDownLatch latch = new CountDownLatch(4);
                 for (int i = 0; i < 4; i++) {
    @@ -984,8 +985,6 @@ public void testReduceTopNWithFromOffset() throws Exception {
                 assertEquals(93.0f, scoreDocs[2].score, 0.0f);
                 assertEquals(92.0f, scoreDocs[3].score, 0.0f);
                 assertEquals(91.0f, scoreDocs[4].score, 0.0f);
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -995,16 +994,17 @@ public void testConsumerSortByField() throws Exception {
             SearchRequest request = randomSearchRequest();
             int size = randomIntBetween(1, 10);
             request.setBatchedReduceSize(bufferSize);
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            expectedNumResults,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                expectedNumResults,
    +                exc -> {}
    +            )
    +        ) {
                 AtomicInteger max = new AtomicInteger();
                 SortField[] sortFields = { new SortField("field", SortField.Type.INT, true) };
                 DocValueFormat[] docValueFormats = { DocValueFormat.RAW };
    @@ -1040,8 +1040,6 @@ public void testConsumerSortByField() throws Exception {
                 assertEquals(SortField.Type.INT, reduce.sortedTopDocs().sortFields()[0].getType());
                 assertNull(reduce.sortedTopDocs().collapseField());
                 assertNull(reduce.sortedTopDocs().collapseValues());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -1051,16 +1049,17 @@ public void testConsumerFieldCollapsing() throws Exception {
             SearchRequest request = randomSearchRequest();
             int size = randomIntBetween(5, 10);
             request.setBatchedReduceSize(bufferSize);
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            expectedNumResults,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                expectedNumResults,
    +                exc -> {}
    +            )
    +        ) {
                 SortField[] sortFields = { new SortField("field", SortField.Type.STRING) };
                 BytesRef a = new BytesRef("a");
                 BytesRef b = new BytesRef("b");
    @@ -1100,8 +1099,6 @@ public void testConsumerFieldCollapsing() throws Exception {
                 assertEquals(SortField.Type.STRING, reduce.sortedTopDocs().sortFields()[0].getType());
                 assertEquals("field", reduce.sortedTopDocs().collapseField());
                 assertArrayEquals(collapseValues, reduce.sortedTopDocs().collapseValues());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -1110,16 +1107,17 @@ public void testConsumerSuggestions() throws Exception {
             int bufferSize = randomIntBetween(2, 200);
             SearchRequest request = randomSearchRequest();
             request.setBatchedReduceSize(bufferSize);
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            expectedNumResults,
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                expectedNumResults,
    +                exc -> {}
    +            )
    +        ) {
                 int maxScoreTerm = -1;
                 int maxScorePhrase = -1;
                 int maxScoreCompletion = -1;
    @@ -1216,8 +1214,6 @@ public void testConsumerSuggestions() throws Exception {
                 assertNull(reduce.sortedTopDocs().sortFields());
                 assertNull(reduce.sortedTopDocs().collapseField());
                 assertNull(reduce.sortedTopDocs().collapseValues());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    @@ -1257,16 +1253,17 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna
                         assertEquals(numReduceListener.incrementAndGet(), reducePhase);
                     }
                 };
    -            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -                fixedExecutor,
    -                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -                () -> false,
    -                progressListener,
    -                request,
    -                expectedNumResults,
    -                exc -> {}
    -            );
    -            try {
    +            try (
    +                SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                    fixedExecutor,
    +                    new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                    () -> false,
    +                    progressListener,
    +                    request,
    +                    expectedNumResults,
    +                    exc -> {}
    +                )
    +            ) {
                     AtomicInteger max = new AtomicInteger();
                     Thread[] threads = new Thread[expectedNumResults];
                     CountDownLatch latch = new CountDownLatch(expectedNumResults);
    @@ -1324,8 +1321,6 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna
                     assertEquals(expectedNumResults, numQueryResultListener.get());
                     assertEquals(0, numQueryFailureListener.get());
                     assertEquals(numReduceListener.get(), reduce.numReducePhases());
    -            } finally {
    -                consumer.decRef();
                 }
             }
         }
    @@ -1348,16 +1343,17 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t
             if (shouldFailPartial) {
                 circuitBreaker.shouldBreak.set(true);
             }
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            circuitBreaker,
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            numShards,
    -            exc -> hasConsumedFailure.set(true)
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                circuitBreaker,
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                numShards,
    +                exc -> hasConsumedFailure.set(true)
    +            )
    +        ) {
                 CountDownLatch latch = new CountDownLatch(numShards);
                 Thread[] threads = new Thread[numShards];
                 for (int i = 0; i < numShards; i++) {
    @@ -1406,8 +1402,6 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t
                 } else {
                     consumer.reduce();
                 }
    -        } finally {
    -            consumer.decRef();
             }
             assertThat(circuitBreaker.allocated, equalTo(0L));
         }
    @@ -1420,16 +1414,17 @@ public void testFailConsumeAggs() throws Exception {
             request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo")).size(0));
             request.setBatchedReduceSize(bufferSize);
             AtomicBoolean hasConsumedFailure = new AtomicBoolean();
    -        SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    -            fixedExecutor,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            () -> false,
    -            SearchProgressListener.NOOP,
    -            request,
    -            expectedNumResults,
    -            exc -> hasConsumedFailure.set(true)
    -        );
    -        try {
    +        try (
    +            SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(
    +                fixedExecutor,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                () -> false,
    +                SearchProgressListener.NOOP,
    +                request,
    +                expectedNumResults,
    +                exc -> hasConsumedFailure.set(true)
    +            )
    +        ) {
                 for (int i = 0; i < expectedNumResults; i++) {
                     final int index = i;
                     QuerySearchResult result = new QuerySearchResult(
    @@ -1454,8 +1449,6 @@ public void testFailConsumeAggs() throws Exception {
                     }
                 }
                 assertNull(consumer.reduce().aggregations());
    -        } finally {
    -            consumer.decRef();
             }
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
    index a973fa20851db..aef472928923b 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
    @@ -187,19 +187,21 @@ public void sendExecuteQuery(
             searchRequest.allowPartialSearchResults(false);
             SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder());
             SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap());
    -        QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer(
    -            searchRequest,
    -            EsExecutors.DIRECT_EXECUTOR_SERVICE,
    -            new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    -            controller,
    -            task::isCancelled,
    -            task.getProgressListener(),
    -            shardsIter.size(),
    -            exc -> {}
    -        );
    -        try {
    +        try (
    +            QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer(
    +                searchRequest,
    +                EsExecutors.DIRECT_EXECUTOR_SERVICE,
    +                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
    +                controller,
    +                task::isCancelled,
    +                task.getProgressListener(),
    +                shardsIter.size(),
    +                exc -> {}
    +            )
    +        ) {
                 SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction(
                     logger,
    +                null,
                     searchTransportService,
                     (clusterAlias, node) -> lookup.get(node),
                     Collections.singletonMap("_na_", AliasFilter.EMPTY),
    @@ -251,8 +253,6 @@ public void run() {
                 assertThat(phase.sortedTopDocs().scoreDocs()[0], instanceOf(FieldDoc.class));
                 assertThat(((FieldDoc) phase.sortedTopDocs().scoreDocs()[0]).fields.length, equalTo(1));
                 assertThat(((FieldDoc) phase.sortedTopDocs().scoreDocs()[0]).fields[0], equalTo(0));
    -        } finally {
    -            resultConsumer.decRef();
             }
         }
     
    @@ -351,6 +351,7 @@ private void testMixedVersionsShardsSearch(VersionInformation oldVersion, Versio
             final List responses = new ArrayList<>();
             SearchQueryThenFetchAsyncAction newSearchAsyncAction = new SearchQueryThenFetchAsyncAction(
                 logger,
    +            null,
                 searchTransportService,
                 (clusterAlias, node) -> lookup.get(node),
                 Collections.singletonMap("_na_", AliasFilter.EMPTY),
    @@ -499,6 +500,7 @@ public void sendExecuteQuery(
             CountDownLatch latch = new CountDownLatch(1);
             SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction(
                 logger,
    +            null,
                 searchTransportService,
                 (clusterAlias, node) -> lookup.get(node),
                 Collections.singletonMap("_na_", AliasFilter.EMPTY),
    @@ -648,6 +650,7 @@ public void sendExecuteQuery(
             CountDownLatch latch = new CountDownLatch(1);
             SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction(
                 logger,
    +            null,
                 searchTransportService,
                 (clusterAlias, node) -> lookup.get(node),
                 Collections.singletonMap("_na_", AliasFilter.EMPTY),
    diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
    index 8c0ffeabf0ea6..6d66a1fcd3847 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
    @@ -154,7 +154,7 @@ public void testRandomVersionSerialization() throws IOException {
                 // Versions before 8.8 don't support rank
                 searchRequest.source().rankBuilder(null);
             }
    -        if (version.before(TransportVersions.V_8_500_020) && searchRequest.source() != null) {
    +        if (version.before(TransportVersions.V_8_9_X) && searchRequest.source() != null) {
                 // Versions before 8_500_999 don't support queries
                 searchRequest.source().subSearches(new ArrayList<>());
             }
    diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
    index e81d7a2246e03..0070d61a2adcb 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
    @@ -60,6 +60,7 @@
     import static java.util.Collections.singletonList;
     import static org.elasticsearch.test.InternalAggregationTestCase.emptyReduceContextBuilder;
     import static org.hamcrest.Matchers.containsInAnyOrder;
    +import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.greaterThan;
     import static org.hamcrest.Matchers.greaterThanOrEqualTo;
     import static org.hamcrest.Matchers.lessThanOrEqualTo;
    @@ -394,7 +395,7 @@ public void testMergeCompletionSuggestions() throws InterruptedException {
                         i,
                         Collections.emptyMap()
                     );
    -                SearchHit hit = new SearchHit(docId);
    +                SearchHit hit = SearchHit.unpooled(docId);
                     ShardId shardId = new ShardId(
                         randomAlphaOfLengthBetween(5, 10),
                         randomAlphaOfLength(10),
    @@ -480,7 +481,7 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedException
                         1F,
                         Collections.emptyMap()
                     );
    -                SearchHit searchHit = new SearchHit(docId);
    +                SearchHit searchHit = SearchHit.unpooled(docId);
                     searchHit.shard(
                         new SearchShardTarget(
                             "node",
    @@ -658,6 +659,7 @@ public void testMergeAggs() throws InterruptedException {
                         ShardSearchFailure.EMPTY_ARRAY,
                         SearchResponse.Clusters.EMPTY
                     );
    +
                     try {
                         addResponse(searchResponseMerger, searchResponse);
                     } finally {
    @@ -820,9 +822,11 @@ public void testMergeSearchHits() throws InterruptedException {
                         ShardSearchFailure.EMPTY_ARRAY,
                         SearchResponseTests.randomClusters()
                     );
    +
                     try {
                         addResponse(searchResponseMerger, searchResponse);
                     } finally {
    +                    searchHits.decRef();
                         searchResponse.decRef();
                     }
                 }
    @@ -969,6 +973,7 @@ public void testMergeEmptySearchHitsWithNonEmpty() {
                     try {
                         merger.add(searchResponse);
                     } finally {
    +                    searchHits.decRef();
                         searchResponse.decRef();
                     }
                 }
    @@ -1119,6 +1124,443 @@ private static SearchHit[] randomSearchHitArray(
             return hits;
         }
     
    +    /**
    +     * Tests the partial results scenario used by MutableSearchResponse when
    +     * doing cross-cluster search with minimize_roundtrips=true
    +     */
    +    public void testPartialAggsMixedWithFullResponses() {
    +        String maxAggName = "max123";
    +        String rangeAggName = "range123";
    +
    +        // partial aggs from local cluster (no search hits)
    +        double value = 33.33;
    +        int count = 33;
    +        SearchResponse searchResponsePartialAggs = new SearchResponse(
    +            SearchHits.empty(new TotalHits(0L, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), Float.NaN),
    +            createDeterminsticAggregation(maxAggName, rangeAggName, value, count),
    +            null,
    +            false,
    +            null,
    +            null,
    +            1,
    +            null,
    +            2,
    +            2,
    +            0,
    +            33,
    +            ShardSearchFailure.EMPTY_ARRAY,
    +            SearchResponse.Clusters.EMPTY
    +        );
    +
    +        // full response from remote1 remote cluster
    +        value = 44.44;
    +        count = 44;
    +        String clusterAlias = "remote1";
    +        int total = 3;
    +        int successful = 2;
    +        int skipped = 1;
    +        Index[] indices = new Index[] { new Index("foo_idx", "1bba9f5b-c5a1-4664-be1b-26be590c1aff") };
    +        final SearchResponse searchResponseRemote1 = new SearchResponse(
    +            createSimpleDeterministicSearchHits(clusterAlias, indices),
    +            createDeterminsticAggregation(maxAggName, rangeAggName, value, count),
    +            null,
    +            false,
    +            null,
    +            null,
    +            1,
    +            null,
    +            total,
    +            successful,
    +            skipped,
    +            44,
    +            ShardSearchFailure.EMPTY_ARRAY,
    +            SearchResponse.Clusters.EMPTY
    +        );
    +
    +        // full response from remote2 remote cluster
    +        value = 55.55;
    +        count = 55;
    +        clusterAlias = "remote2";
    +        total = 3;
    +        successful = 2;
    +        skipped = 1;
    +        indices = new Index[] { new Index("foo_idx", "ae024679-097a-4a27-abf8-403f1e9189de") };
    +        SearchResponse searchResponseRemote2 = new SearchResponse(
    +            createSimpleDeterministicSearchHits(clusterAlias, indices),
    +            createDeterminsticAggregation(maxAggName, rangeAggName, value, count),
    +            null,
    +            false,
    +            null,
    +            null,
    +            1,
    +            null,
    +            total,
    +            successful,
    +            skipped,
    +            55,
    +            ShardSearchFailure.EMPTY_ARRAY,
    +            SearchResponse.Clusters.EMPTY
    +        );
    +        try {
    +            SearchResponse.Clusters clusters = SearchResponseTests.createCCSClusterObject(
    +                3,
    +                2,
    +                true,
    +                2,
    +                1,
    +                0,
    +                0,
    +                new ShardSearchFailure[0]
    +            );
    +
    +            // merge partial aggs with remote1, check, then merge in remote2, check
    +            try (
    +                SearchResponseMerger searchResponseMerger = new SearchResponseMerger(
    +                    0,
    +                    10,
    +                    10,
    +                    new SearchTimeProvider(0, 0, () -> 0),
    +                    emptyReduceContextBuilder(
    +                        new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName))
    +                            .addAggregator(new DateRangeAggregationBuilder(rangeAggName))
    +                    )
    +                )
    +            ) {
    +                searchResponseMerger.add(searchResponsePartialAggs);
    +                searchResponseMerger.add(searchResponseRemote1);
    +                SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters);
    +                try {
    +                    SearchHits hits = mergedResponse.getHits();
    +                    assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1
    +                    SearchHit hit1 = hits.getHits()[0];
    +                    String expectedHit1 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 2.0,
    +                          "sort" : [
    +                            2.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit1.toString(), expectedHit1);
    +
    +                    SearchHit hit2 = hits.getHits()[1];
    +                    String expectedHit2 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 1.0,
    +                          "sort" : [
    +                            1.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit2.toString(), expectedHit2);
    +
    +                    double expectedMaxValue = 44.44;  // value from remote1
    +                    long expectedBucketsDocCount = 33 + 44;
    +                    Max max = mergedResponse.getAggregations().get(maxAggName);
    +                    assertEquals(expectedMaxValue, max.value(), 0d);
    +                    Range range = mergedResponse.getAggregations().get(rangeAggName);
    +                    assertEquals(1, range.getBuckets().size());
    +                    Range.Bucket bucket = range.getBuckets().get(0);
    +                    assertEquals("0.0", bucket.getFromAsString());
    +                    assertEquals("10000.0", bucket.getToAsString());
    +                    assertEquals(expectedBucketsDocCount, bucket.getDocCount());
    +                } finally {
    +                    mergedResponse.decRef();
    +                }
    +
    +                searchResponseMerger.add(searchResponseRemote2);
    +                mergedResponse = searchResponseMerger.getMergedResponse(clusters);
    +                try {
    +                    SearchHits hits = mergedResponse.getHits();
    +                    assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2
    +
    +                    SearchHit hit1 = hits.getHits()[0];
    +                    String expectedHit1 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 2.0,
    +                          "sort" : [
    +                            2.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit1.toString(), expectedHit1);
    +
    +                    SearchHit hit2 = hits.getHits()[1];
    +                    String expectedHit2 = """
    +                        {
    +                          "_index" : "remote2:foo_idx",
    +                          "_score" : 2.0,
    +                          "sort" : [
    +                            2.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit2.toString(), expectedHit2);
    +
    +                    SearchHit hit3 = hits.getHits()[2];
    +                    String expectedHit3 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 1.0,
    +                          "sort" : [
    +                            1.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit3.toString(), expectedHit3);
    +
    +                    SearchHit hit4 = hits.getHits()[3];
    +                    String expectedHit4 = """
    +                        {
    +                          "_index" : "remote2:foo_idx",
    +                          "_score" : 1.0,
    +                          "sort" : [
    +                            1.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit4.toString(), expectedHit4);
    +
    +                    double expectedMaxValue = 55.55;  // value from remote2
    +                    long expectedBucketsDocCount = 33 + 44 + 55;
    +                    Max max = mergedResponse.getAggregations().get(maxAggName);
    +                    assertEquals(expectedMaxValue, max.value(), 0d);
    +                    Range range = mergedResponse.getAggregations().get(rangeAggName);
    +                    assertEquals(1, range.getBuckets().size());
    +                    Range.Bucket bucket = range.getBuckets().get(0);
    +                    assertEquals("0.0", bucket.getFromAsString());
    +                    assertEquals("10000.0", bucket.getToAsString());
    +                    assertEquals(expectedBucketsDocCount, bucket.getDocCount());
    +                } finally {
    +                    mergedResponse.decRef();
    +                }
    +            }
    +
    +            // merge remote1 and remote2, no partial aggs, check, then merge in partial aggs from local, check
    +            try (
    +                SearchResponseMerger searchResponseMerger = new SearchResponseMerger(
    +                    0,
    +                    10,
    +                    10,
    +                    new SearchTimeProvider(0, 0, () -> 0),
    +                    emptyReduceContextBuilder(
    +                        new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName))
    +                            .addAggregator(new DateRangeAggregationBuilder(rangeAggName))
    +                    )
    +                )
    +            ) {
    +                searchResponseMerger.add(searchResponseRemote2);
    +                searchResponseMerger.add(searchResponseRemote1);
    +                SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters);
    +                try {
    +                    SearchHits hits = mergedResponse.getHits();
    +                    SearchHit hit1 = hits.getHits()[0];
    +                    String expectedHit1 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 2.0,
    +                          "sort" : [
    +                            2.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit1.toString(), expectedHit1);
    +
    +                    SearchHit hit2 = hits.getHits()[1];
    +                    String expectedHit2 = """
    +                        {
    +                          "_index" : "remote2:foo_idx",
    +                          "_score" : 2.0,
    +                          "sort" : [
    +                            2.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit2.toString(), expectedHit2);
    +
    +                    SearchHit hit3 = hits.getHits()[2];
    +                    String expectedHit3 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 1.0,
    +                          "sort" : [
    +                            1.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit3.toString(), expectedHit3);
    +
    +                    SearchHit hit4 = hits.getHits()[3];
    +                    String expectedHit4 = """
    +                        {
    +                          "_index" : "remote2:foo_idx",
    +                          "_score" : 1.0,
    +                          "sort" : [
    +                            1.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit4.toString(), expectedHit4);
    +
    +                    double expectedMaxValue = 55.55;  // value from remote2
    +                    long expectedBucketsDocCount = 44 + 55; // missing 33 from local partial aggs
    +                    Max max = mergedResponse.getAggregations().get(maxAggName);
    +                    assertEquals(expectedMaxValue, max.value(), 0d);
    +                    Range range = mergedResponse.getAggregations().get(rangeAggName);
    +                    assertEquals(1, range.getBuckets().size());
    +                    Range.Bucket bucket = range.getBuckets().get(0);
    +                    assertEquals("0.0", bucket.getFromAsString());
    +                    assertEquals("10000.0", bucket.getToAsString());
    +                    assertEquals(expectedBucketsDocCount, bucket.getDocCount());
    +                } finally {
    +                    mergedResponse.decRef();
    +                }
    +
    +                searchResponseMerger.add(searchResponsePartialAggs);
    +                mergedResponse = searchResponseMerger.getMergedResponse(clusters);
    +                try {
    +                    SearchHits hits = mergedResponse.getHits();
    +                    assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2
    +
    +                    SearchHit hit1 = hits.getHits()[0];
    +                    String expectedHit1 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 2.0,
    +                          "sort" : [
    +                            2.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit1.toString(), expectedHit1);
    +
    +                    SearchHit hit2 = hits.getHits()[1];
    +                    String expectedHit2 = """
    +                        {
    +                          "_index" : "remote2:foo_idx",
    +                          "_score" : 2.0,
    +                          "sort" : [
    +                            2.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit2.toString(), expectedHit2);
    +
    +                    SearchHit hit3 = hits.getHits()[2];
    +                    String expectedHit3 = """
    +                        {
    +                          "_index" : "remote1:foo_idx",
    +                          "_score" : 1.0,
    +                          "sort" : [
    +                            1.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit3.toString(), expectedHit3);
    +
    +                    SearchHit hit4 = hits.getHits()[3];
    +                    String expectedHit4 = """
    +                        {
    +                          "_index" : "remote2:foo_idx",
    +                          "_score" : 1.0,
    +                          "sort" : [
    +                            1.0
    +                          ]
    +                        }""";
    +                    assertEquals(hit4.toString(), expectedHit4);
    +
    +                    double expectedMaxValue = 55.55;  // value from remote2
    +                    long expectedBucketsDocCount = 33 + 44 + 55;  // contributions from all 3 search responses
    +                    Max max = mergedResponse.getAggregations().get(maxAggName);
    +                    assertEquals(expectedMaxValue, max.value(), 0d);
    +                    Range range = mergedResponse.getAggregations().get(rangeAggName);
    +                    assertEquals(1, range.getBuckets().size());
    +                    Range.Bucket bucket = range.getBuckets().get(0);
    +                    assertEquals("0.0", bucket.getFromAsString());
    +                    assertEquals("10000.0", bucket.getToAsString());
    +                    assertEquals(expectedBucketsDocCount, bucket.getDocCount());
    +                } finally {
    +                    mergedResponse.decRef();
    +                }
    +            }
    +        } finally {
    +            searchResponseRemote1.decRef();
    +            searchResponseRemote2.decRef();
    +            searchResponsePartialAggs.decRef();
    +        }
    +    }
    +
    +    private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Index[] indices) {
    +        TotalHits totalHits = new TotalHits(2, TotalHits.Relation.EQUAL_TO);
    +        final int numDocs = (int) totalHits.value;
    +        int scoreFactor = 1;
    +        float maxScore = numDocs;
    +        int numFields = 1;
    +        SortField[] sortFields = new SortField[numFields];
    +        sortFields[0] = SortField.FIELD_SCORE;
    +        PriorityQueue priorityQueue = new PriorityQueue<>(new SearchHitComparator(sortFields));
    +        SearchHit[] hits = deterministicSearchHitArray(numDocs, clusterAlias, indices, maxScore, scoreFactor, sortFields, priorityQueue);
    +
    +        return SearchHits.unpooled(hits, totalHits, maxScore == Float.NEGATIVE_INFINITY ? Float.NaN : maxScore, sortFields, null, null);
    +    }
    +
    +    private static InternalAggregations createDeterminsticAggregation(String maxAggName, String rangeAggName, double value, int count) {
    +        Max max = new Max(maxAggName, value, DocValueFormat.RAW, Collections.emptyMap());
    +        InternalDateRange.Factory factory = new InternalDateRange.Factory();
    +        InternalDateRange.Bucket bucket = factory.createBucket(
    +            "bucket",
    +            0D,
    +            10000D,
    +            count,
    +            InternalAggregations.EMPTY,
    +            false,
    +            DocValueFormat.RAW
    +        );
    +
    +        InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap());
    +        InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max));
    +        return aggs;
    +    }
    +
    +    private static SearchHit[] deterministicSearchHitArray(
    +        int numDocs,
    +        String clusterAlias,
    +        Index[] indices,
    +        float maxScore,
    +        int scoreFactor,
    +        SortField[] sortFields,
    +        PriorityQueue priorityQueue
    +    ) {
    +        SearchHit[] hits = new SearchHit[numDocs];
    +
    +        int[] sortFieldFactors = new int[sortFields == null ? 0 : sortFields.length];
    +        for (int j = 0; j < sortFieldFactors.length; j++) {
    +            sortFieldFactors[j] = 1;
    +        }
    +
    +        for (int j = 0; j < numDocs; j++) {
    +            ShardId shardId = new ShardId(randomFrom(indices), j);
    +            SearchShardTarget shardTarget = new SearchShardTarget("abc123", shardId, clusterAlias);
    +            SearchHit hit = SearchHit.unpooled(j);
    +
    +            float score = Float.NaN;
    +            if (Float.isNaN(maxScore) == false) {
    +                score = (maxScore - j) * scoreFactor;
    +                hit.score(score);
    +            }
    +
    +            hit.shard(shardTarget);
    +            if (sortFields != null) {
    +                Object[] rawSortValues = new Object[sortFields.length];
    +                DocValueFormat[] docValueFormats = new DocValueFormat[sortFields.length];
    +                for (int k = 0; k < sortFields.length; k++) {
    +                    SortField sortField = sortFields[k];
    +                    if (sortField == SortField.FIELD_SCORE) {
    +                        hit.score(score);
    +                        rawSortValues[k] = score;
    +                    } else {
    +                        rawSortValues[k] = sortField.getReverse() ? numDocs * sortFieldFactors[k] - j : j;
    +                    }
    +                    docValueFormats[k] = DocValueFormat.RAW;
    +                }
    +                hit.sortValues(rawSortValues, docValueFormats);
    +            }
    +            hits[j] = hit;
    +            priorityQueue.add(hit);
    +        }
    +        return hits;
    +    }
    +
         private static Map randomRealisticIndices(int numIndices, int numClusters) {
             String[] indicesNames = new String[numIndices];
             for (int i = 0; i < numIndices; i++) {
    diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
    index ef759279e095f..8cc19deee757e 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
    @@ -26,21 +26,16 @@
     import org.elasticsearch.search.SearchHitsTests;
     import org.elasticsearch.search.SearchModule;
     import org.elasticsearch.search.SearchResponseUtils;
    -import org.elasticsearch.search.aggregations.AggregationsTests;
    -import org.elasticsearch.search.aggregations.InternalAggregations;
     import org.elasticsearch.search.profile.SearchProfileResults;
     import org.elasticsearch.search.profile.SearchProfileResultsTests;
     import org.elasticsearch.search.suggest.Suggest;
     import org.elasticsearch.search.suggest.SuggestTests;
     import org.elasticsearch.test.ESTestCase;
    -import org.elasticsearch.test.InternalAggregationTestCase;
     import org.elasticsearch.xcontent.NamedXContentRegistry;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentBuilder;
     import org.elasticsearch.xcontent.XContentParser;
     import org.elasticsearch.xcontent.XContentType;
    -import org.junit.After;
    -import org.junit.Before;
     
     import java.io.IOException;
     import java.util.ArrayList;
    @@ -59,25 +54,13 @@ public class SearchResponseTests extends ESTestCase {
     
         private static final NamedXContentRegistry xContentRegistry;
         static {
    -        List namedXContents = new ArrayList<>(InternalAggregationTestCase.getDefaultNamedXContents());
    -        namedXContents.addAll(SuggestTests.getDefaultNamedXContents());
    +        List namedXContents = new ArrayList<>(SuggestTests.getDefaultNamedXContents());
             xContentRegistry = new NamedXContentRegistry(namedXContents);
         }
     
         private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(
             new SearchModule(Settings.EMPTY, emptyList()).getNamedWriteables()
         );
    -    private AggregationsTests aggregationsTests = new AggregationsTests();
    -
    -    @Before
    -    public void init() throws Exception {
    -        aggregationsTests.init();
    -    }
    -
    -    @After
    -    public void cleanUp() throws Exception {
    -        aggregationsTests.cleanUp();
    -    }
     
         @Override
         protected NamedXContentRegistry xContentRegistry() {
    @@ -115,25 +98,28 @@ private SearchResponse createTestItem(boolean minimal, ShardSearchFailure... sha
             }
             if (minimal == false) {
                 SearchHits hits = SearchHitsTests.createTestItem(true, true);
    -            InternalAggregations aggregations = aggregationsTests.createTestInstance();
    -            Suggest suggest = SuggestTests.createTestItem();
    -            SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem();
    -            return new SearchResponse(
    -                hits,
    -                aggregations,
    -                suggest,
    -                timedOut,
    -                terminatedEarly,
    -                profileResults,
    -                numReducePhases,
    -                null,
    -                totalShards,
    -                successfulShards,
    -                skippedShards,
    -                tookInMillis,
    -                shardSearchFailures,
    -                clusters
    -            );
    +            try {
    +                Suggest suggest = SuggestTests.createTestItem();
    +                SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem();
    +                return new SearchResponse(
    +                    hits,
    +                    null,
    +                    suggest,
    +                    timedOut,
    +                    terminatedEarly,
    +                    profileResults,
    +                    numReducePhases,
    +                    null,
    +                    totalShards,
    +                    successfulShards,
    +                    skippedShards,
    +                    tookInMillis,
    +                    shardSearchFailures,
    +                    clusters
    +                );
    +            } finally {
    +                hits.decRef();
    +            }
             } else {
                 return SearchResponseUtils.emptyWithTotalHits(
                     null,
    @@ -381,9 +367,10 @@ public void testToXContent() throws IOException {
             SearchHit hit = new SearchHit(1, "id1");
             hit.score(2.0f);
             SearchHit[] hits = new SearchHit[] { hit };
    +        var sHits = new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f);
             {
                 SearchResponse response = new SearchResponse(
    -                new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
    +                sHits,
                     null,
                     null,
                     false,
    @@ -425,7 +412,7 @@ public void testToXContent() throws IOException {
             }
             {
                 SearchResponse response = new SearchResponse(
    -                new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
    +                sHits,
                     null,
                     null,
                     false,
    @@ -475,7 +462,7 @@ public void testToXContent() throws IOException {
             }
             {
                 SearchResponse response = new SearchResponse(
    -                new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
    +                sHits,
                     null,
                     null,
                     false,
    @@ -617,6 +604,7 @@ public void testToXContent() throws IOException {
                     response.decRef();
                 }
             }
    +        sHits.decRef();
         }
     
         public void testSerialization() throws IOException {
    diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
    index fb27d824417b1..d04e41c83699d 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
    @@ -25,6 +25,7 @@
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.search.SearchResponseUtils;
     import org.elasticsearch.tasks.Task;
    +import org.elasticsearch.telemetry.metric.MeterRegistry;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.transport.Transport;
    @@ -52,7 +53,7 @@ public void testParentTaskId() throws Exception {
             Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build();
             ActionFilters actionFilters = mock(ActionFilters.class);
             when(actionFilters.filters()).thenReturn(new ActionFilter[0]);
    -        ThreadPool threadPool = new ThreadPool(settings);
    +        ThreadPool threadPool = new ThreadPool(settings, MeterRegistry.NOOP);
             try {
                 TransportService transportService = new TransportService(
                     Settings.EMPTY,
    @@ -120,7 +121,7 @@ public void testBatchExecute() throws ExecutionException, InterruptedException {
             Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build();
             ActionFilters actionFilters = mock(ActionFilters.class);
             when(actionFilters.filters()).thenReturn(new ActionFilter[0]);
    -        ThreadPool threadPool = new ThreadPool(settings);
    +        ThreadPool threadPool = new ThreadPool(settings, MeterRegistry.NOOP);
             TransportService transportService = new TransportService(
                 Settings.EMPTY,
                 mock(Transport.class),
    diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
    index fea6e39ea881b..e0eed9daa97f6 100644
    --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
    @@ -76,6 +76,7 @@
     import org.elasticsearch.search.vectors.KnnSearchBuilder;
     import org.elasticsearch.tasks.TaskId;
     import org.elasticsearch.telemetry.TelemetryProvider;
    +import org.elasticsearch.telemetry.metric.MeterRegistry;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.TransportVersionUtils;
     import org.elasticsearch.test.transport.MockTransportService;
    @@ -509,8 +510,12 @@ public void testCCSRemoteReduceMergeFails() throws Exception {
                     ActionListener.wrap(r -> fail("no response expected"), failure::set),
                     latch
                 );
    +
    +            TaskId parentTaskId = new TaskId("n", 1);
    +            SearchTask task = new SearchTask(2, "search", "search", () -> "desc", parentTaskId, Collections.emptyMap());
                 TransportSearchAction.ccsRemoteReduce(
    -                new TaskId("n", 1),
    +                task,
    +                parentTaskId,
                     searchRequest,
                     localIndices,
                     remoteIndicesByCluster,
    @@ -566,6 +571,7 @@ public void testCCSRemoteReduce() throws Exception {
                 service.start();
                 service.acceptIncomingRequests();
                 RemoteClusterService remoteClusterService = service.getRemoteClusterService();
    +            // using from: 0 and size: 10
                 {
                     SearchRequest searchRequest = new SearchRequest();
                     final CountDownLatch latch = new CountDownLatch(1);
    @@ -578,8 +584,11 @@ public void testCCSRemoteReduce() throws Exception {
                         }),
                         latch
                     );
    +                TaskId parentTaskId = new TaskId("n", 1);
    +                SearchTask task = new SearchTask(2, "search", "search", () -> "desc", parentTaskId, Collections.emptyMap());
                     TransportSearchAction.ccsRemoteReduce(
    -                    new TaskId("n", 1),
    +                    task,
    +                    parentTaskId,
                         searchRequest,
                         localIndices,
                         remoteIndicesByCluster,
    @@ -617,6 +626,93 @@ public void testCCSRemoteReduce() throws Exception {
                         searchResponse.decRef();
                     }
                 }
    +
    +            // using from: 5 and size: 6
    +            {
    +                SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().from(5).size(6);
    +                SearchRequest searchRequest = new SearchRequest(new String[] { "*", "*:*" }, sourceBuilder);
    +                final CountDownLatch latch = new CountDownLatch(1);
    +                SetOnce>> setOnce = new SetOnce<>();
    +                final SetOnce response = new SetOnce<>();
    +                LatchedActionListener listener = new LatchedActionListener<>(
    +                    ActionTestUtils.assertNoFailureListener(newValue -> {
    +                        newValue.incRef();
    +                        response.set(newValue);
    +                    }),
    +                    latch
    +                );
    +                TaskId parentTaskId = new TaskId("n", 1);
    +                SearchTask task = new SearchTask(2, "search", "search", () -> "desc", parentTaskId, Collections.emptyMap());
    +                TransportSearchAction.ccsRemoteReduce(
    +                    task,
    +                    parentTaskId,
    +                    searchRequest,
    +                    localIndices,
    +                    remoteIndicesByCluster,
    +                    new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()),
    +                    timeProvider,
    +                    emptyReduceContextBuilder(),
    +                    remoteClusterService,
    +                    threadPool,
    +                    listener,
    +                    (r, l) -> setOnce.set(Tuple.tuple(r, l))
    +                );
    +                if (localIndices == null) {
    +                    assertNull(setOnce.get());
    +                } else {
    +                    Tuple> tuple = setOnce.get();
    +                    assertEquals("", tuple.v1().getLocalClusterAlias());
    +                    assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class));
    +                    resolveWithEmptySearchResponse(tuple);
    +                }
    +                awaitLatch(latch, 5, TimeUnit.SECONDS);
    +
    +                SearchResponse searchResponse = response.get();
    +                try {
    +                    assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED));
    +                    assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING));
    +                    assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL));
    +                    assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED));
    +                    assertEquals(totalClusters, searchResponse.getClusters().getTotal());
    +                    assertEquals(
    +                        totalClusters,
    +                        searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)
    +                    );
    +                    assertEquals(totalClusters == 1 ? 1 : totalClusters + 1, searchResponse.getNumReducePhases());
    +                } finally {
    +                    searchResponse.decRef();
    +                }
    +            }
    +
    +        } finally {
    +            for (MockTransportService mockTransportService : mockTransportServices) {
    +                mockTransportService.close();
    +            }
    +        }
    +    }
    +
    +    public void testCCSRemoteReduceWhereRemoteClustersFail() throws Exception {
    +        int numClusters = randomIntBetween(1, 10);
    +        DiscoveryNode[] nodes = new DiscoveryNode[numClusters];
    +        Map remoteIndicesByCluster = new HashMap<>();
    +        Settings.Builder builder = Settings.builder();
    +        MockTransportService[] mockTransportServices = startTransport(numClusters, nodes, remoteIndicesByCluster, builder);
    +        Settings settings = builder.build();
    +        boolean local = randomBoolean();
    +        OriginalIndices localIndices = local ? new OriginalIndices(new String[] { "index" }, SearchRequest.DEFAULT_INDICES_OPTIONS) : null;
    +        TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0);
    +        try (
    +            MockTransportService service = MockTransportService.createNewService(
    +                settings,
    +                VersionInformation.CURRENT,
    +                TransportVersion.current(),
    +                threadPool,
    +                null
    +            )
    +        ) {
    +            service.start();
    +            service.acceptIncomingRequests();
    +            RemoteClusterService remoteClusterService = service.getRemoteClusterService();
                 {
                     SearchRequest searchRequest = new SearchRequest();
                     searchRequest.preference("index_not_found");
    @@ -627,8 +723,12 @@ public void testCCSRemoteReduce() throws Exception {
                         ActionListener.wrap(r -> fail("no response expected"), failure::set),
                         latch
                     );
    +
    +                TaskId parentTaskId = new TaskId("n", 1);
    +                SearchTask task = new SearchTask(2, "search", "search", () -> "desc", parentTaskId, Collections.emptyMap());
                     TransportSearchAction.ccsRemoteReduce(
    -                    new TaskId("n", 1),
    +                    task,
    +                    parentTaskId,
                         searchRequest,
                         localIndices,
                         remoteIndicesByCluster,
    @@ -655,6 +755,37 @@ public void testCCSRemoteReduce() throws Exception {
                     assertEquals(RestStatus.NOT_FOUND, remoteTransportException.status());
                 }
     
    +        } finally {
    +            for (MockTransportService mockTransportService : mockTransportServices) {
    +                mockTransportService.close();
    +            }
    +        }
    +    }
    +
    +    public void testCCSRemoteReduceWithDisconnectedRemoteClusters() throws Exception {
    +        int numClusters = randomIntBetween(1, 10);
    +        DiscoveryNode[] nodes = new DiscoveryNode[numClusters];
    +        Map remoteIndicesByCluster = new HashMap<>();
    +        Settings.Builder builder = Settings.builder();
    +        MockTransportService[] mockTransportServices = startTransport(numClusters, nodes, remoteIndicesByCluster, builder);
    +        Settings settings = builder.build();
    +        boolean local = randomBoolean();
    +        OriginalIndices localIndices = local ? new OriginalIndices(new String[] { "index" }, SearchRequest.DEFAULT_INDICES_OPTIONS) : null;
    +        int totalClusters = numClusters + (local ? 1 : 0);
    +        TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0);
    +        try (
    +            MockTransportService service = MockTransportService.createNewService(
    +                settings,
    +                VersionInformation.CURRENT,
    +                TransportVersion.current(),
    +                threadPool,
    +                null
    +            )
    +        ) {
    +            service.start();
    +            service.acceptIncomingRequests();
    +            RemoteClusterService remoteClusterService = service.getRemoteClusterService();
    +
                 int numDisconnectedClusters = randomIntBetween(1, numClusters);
                 Set disconnectedNodes = Sets.newHashSetWithExpectedSize(numDisconnectedClusters);
                 Set disconnectedNodesIndices = Sets.newHashSetWithExpectedSize(numDisconnectedClusters);
    @@ -687,8 +818,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti
                         ActionListener.wrap(r -> fail("no response expected"), failure::set),
                         latch
                     );
    +                TaskId parentTaskId = new TaskId("n", 1);
    +                SearchTask task = new SearchTask(2, "search", "search", () -> "desc", parentTaskId, Collections.emptyMap());
                     TransportSearchAction.ccsRemoteReduce(
    -                    new TaskId("n", 1),
    +                    task,
    +                    parentTaskId,
                         searchRequest,
                         localIndices,
                         remoteIndicesByCluster,
    @@ -736,8 +870,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti
                     if (localIndices != null) {
                         clusterAliases.add("");
                     }
    +                TaskId parentTaskId = new TaskId("n", 1);
    +                SearchTask task = new SearchTask(2, "search", "search", () -> "desc", parentTaskId, Collections.emptyMap());
                     TransportSearchAction.ccsRemoteReduce(
    -                    new TaskId("n", 1),
    +                    task,
    +                    parentTaskId,
                         searchRequest,
                         localIndices,
                         remoteIndicesByCluster,
    @@ -807,8 +944,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti
                     if (localIndices != null) {
                         clusterAliases.add("");
                     }
    +                TaskId parentTaskId = new TaskId("n", 1);
    +                SearchTask task = new SearchTask(2, "search", "search", () -> "desc", parentTaskId, Collections.emptyMap());
                     TransportSearchAction.ccsRemoteReduce(
    -                    new TaskId("n", 1),
    +                    task,
    +                    parentTaskId,
                         searchRequest,
                         localIndices,
                         remoteIndicesByCluster,
    @@ -1557,7 +1697,7 @@ public void testCCSCompatibilityCheck() throws Exception {
             ActionFilters actionFilters = mock(ActionFilters.class);
             when(actionFilters.filters()).thenReturn(new ActionFilter[0]);
             TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true);
    -        ThreadPool threadPool = new ThreadPool(settings);
    +        ThreadPool threadPool = new ThreadPool(settings, MeterRegistry.NOOP);
             try {
                 TransportService transportService = MockTransportService.createNewService(
                     Settings.EMPTY,
    diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java
    index 8062bfea5a637..a199003fc59c4 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java
    @@ -41,7 +41,7 @@ protected Collection> getPlugins() {
             return List.of(TestPlugin.class);
         }
     
    -    static final ActionType TYPE = ActionType.localOnly("test:action");
    +    static final ActionType TYPE = new ActionType<>("test:action");
     
         public void testAsyncActionFilterRefCounting() {
             final var countDownLatch = new CountDownLatch(2);
    diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
    index 64ab7a9819190..82c204b1d0b88 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
    @@ -20,6 +20,7 @@
     import org.elasticsearch.node.Node;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.tasks.TaskManager;
    +import org.elasticsearch.telemetry.metric.MeterRegistry;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.threadpool.ThreadPool;
     import org.junit.After;
    @@ -50,7 +51,10 @@ public class TransportActionFilterChainTests extends ESTestCase {
         @Before
         public void init() throws Exception {
             counter = new AtomicInteger();
    -        threadPool = new ThreadPool(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "TransportActionFilterChainTests").build());
    +        threadPool = new ThreadPool(
    +            Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "TransportActionFilterChainTests").build(),
    +            MeterRegistry.NOOP
    +        );
         }
     
         @After
    diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java
    index a0d0b1809e1f7..7eccd65dfea8c 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java
    @@ -48,7 +48,7 @@
     
     import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state;
     import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas;
    -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
     import static org.elasticsearch.test.ClusterServiceUtils.setState;
     import static org.hamcrest.Matchers.containsString;
    @@ -308,15 +308,12 @@ public void testInvalidNodes() throws Exception {
             IndexShardRoutingTable.Builder wrongRoutingTableBuilder = new IndexShardRoutingTable.Builder(shardId);
             for (int i = 0; i < routingTable.size(); i++) {
                 ShardRouting shardRouting = routingTable.shard(i);
    -            ShardRouting wrongShardRouting = newShardRouting(
    -                shardId,
    -                shardRouting.currentNodeId() + randomIntBetween(10, 100),
    -                shardRouting.relocatingNodeId(),
    -                shardRouting.primary(),
    -                shardRouting.state(),
    -                shardRouting.unassignedInfo(),
    -                shardRouting.role()
    -            );
    +            String currentNodeId = shardRouting.currentNodeId() + randomIntBetween(10, 100);
    +            ShardRouting wrongShardRouting = shardRoutingBuilder(shardId, currentNodeId, shardRouting.primary(), shardRouting.state())
    +                .withRelocatingNodeId(shardRouting.relocatingNodeId())
    +                .withUnassignedInfo(shardRouting.unassignedInfo())
    +                .withRole(shardRouting.role())
    +                .build();
                 wrongRoutingTableBuilder.addShard(wrongShardRouting);
             }
             IndexShardRoutingTable wrongRoutingTable = wrongRoutingTableBuilder.build();
    diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
    index 4d26ae610da3c..689040f9b6c54 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
    @@ -139,6 +139,7 @@ public void testResponseAggregation() {
                     successfulNodes.add(capturedRequest.node());
                     final var response = new TestNodeResponse(capturedRequest.node());
                     transport.handleResponse(capturedRequest.requestId(), response);
    +                response.decRef();
                     assertFalse(response.hasReferences()); // response is copied (via the wire protocol) so this instance is released
                 } else {
                     failedNodeIds.add(capturedRequest.node().getId());
    diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
    index 8bda62b91bc7e..90c118fa355f0 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
    @@ -12,7 +12,6 @@
     import org.elasticsearch.action.NoShardAvailableActionException;
     import org.elasticsearch.action.UnavailableShardsException;
     import org.elasticsearch.action.admin.indices.flush.FlushRequest;
    -import org.elasticsearch.action.admin.indices.flush.FlushResponse;
     import org.elasticsearch.action.admin.indices.flush.TransportFlushAction;
     import org.elasticsearch.action.support.ActionFilters;
     import org.elasticsearch.action.support.ActionTestUtils;
    @@ -20,6 +19,7 @@
     import org.elasticsearch.action.support.PlainActionFuture;
     import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse;
     import org.elasticsearch.action.support.broadcast.BroadcastRequest;
    +import org.elasticsearch.action.support.broadcast.BroadcastResponse;
     import org.elasticsearch.cluster.ClusterState;
     import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    @@ -164,7 +164,7 @@ public void testStartedPrimary() throws InterruptedException, ExecutionException
             ActionTestUtils.execute(broadcastReplicationAction, null, new DummyBroadcastRequest(index), response);
             for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) {
                 ReplicationResponse replicationResponse = new ReplicationResponse();
    -            replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(1, 1));
    +            replicationResponse.setShardInfo(ReplicationResponse.ShardInfo.allSuccessful(1));
                 shardRequests.v2().onResponse(replicationResponse);
             }
             logger.info("total shards: {}, ", response.get().getTotalShards());
    @@ -198,7 +198,7 @@ public void testResultCombine() throws InterruptedException, ExecutionException,
                         );
                         failed++;
                     }
    -                replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(2, shardsSucceeded, failures));
    +                replicationResponse.setShardInfo(ReplicationResponse.ShardInfo.of(2, shardsSucceeded, failures));
                     shardRequests.v2().onResponse(replicationResponse);
                 } else {
                     // sometimes fail
    @@ -286,9 +286,9 @@ protected void shardExecute(
             }
         }
     
    -    public FlushResponse assertImmediateResponse(String index, TransportFlushAction flushAction) {
    +    public BroadcastResponse assertImmediateResponse(String index, TransportFlushAction flushAction) {
             Date beginDate = new Date();
    -        FlushResponse flushResponse = ActionTestUtils.executeBlocking(flushAction, new FlushRequest(index));
    +        BroadcastResponse flushResponse = ActionTestUtils.executeBlocking(flushAction, new FlushRequest(index));
             Date endDate = new Date();
             long maxTime = 500;
             assertThat(
    diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java
    index 7684281a6a563..efc506ec51301 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java
    @@ -35,34 +35,35 @@ public class ReplicationResponseTests extends ESTestCase {
         public void testShardInfoToString() {
             final int total = 5;
             final int successful = randomIntBetween(1, total);
    -        final ShardInfo shardInfo = new ShardInfo(total, successful);
    +        final ShardInfo shardInfo = ShardInfo.of(total, successful);
             assertEquals(Strings.format("ShardInfo{total=5, successful=%d, failures=[]}", successful), shardInfo.toString());
         }
     
         public void testShardInfoToXContent() throws IOException {
             {
    -            ShardInfo shardInfo = new ShardInfo(5, 3);
    +            ShardInfo shardInfo = ShardInfo.of(5, 3);
                 String output = Strings.toString(shardInfo);
                 assertEquals("{\"total\":5,\"successful\":3,\"failed\":0}", output);
             }
             {
    -            ShardInfo shardInfo = new ShardInfo(
    +            ShardInfo shardInfo = ShardInfo.of(
                     6,
                     4,
    -                new ShardInfo.Failure(
    -                    new ShardId("index", "_uuid", 3),
    -                    "_node_id",
    -                    new IllegalArgumentException("Wrong"),
    -                    RestStatus.BAD_REQUEST,
    -                    false
    -                ),
    -                new ShardInfo.Failure(
    -                    new ShardId("index", "_uuid", 1),
    -                    "_node_id",
    -                    new CircuitBreakingException("Wrong", 12, 21, CircuitBreaker.Durability.PERMANENT),
    -                    RestStatus.NOT_ACCEPTABLE,
    -                    true
    -                )
    +                new ShardInfo.Failure[] {
    +                    new ShardInfo.Failure(
    +                        new ShardId("index", "_uuid", 3),
    +                        "_node_id",
    +                        new IllegalArgumentException("Wrong"),
    +                        RestStatus.BAD_REQUEST,
    +                        false
    +                    ),
    +                    new ShardInfo.Failure(
    +                        new ShardId("index", "_uuid", 1),
    +                        "_node_id",
    +                        new CircuitBreakingException("Wrong", 12, 21, CircuitBreaker.Durability.PERMANENT),
    +                        RestStatus.NOT_ACCEPTABLE,
    +                        true
    +                    ) }
                 );
                 String output = Strings.toString(shardInfo);
                 assertEquals(XContentHelper.stripWhitespace("""
    diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
    index 1c857d5554113..9b8b501912bac 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
    @@ -1468,7 +1468,7 @@ static class TestResponse extends ReplicationResponse {
             }
     
             TestResponse() {
    -            setShardInfo(new ShardInfo());
    +            setShardInfo(ReplicationResponse.ShardInfo.EMPTY);
             }
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java
    index e687b3b1c377f..d0ae26f97917a 100644
    --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java
    @@ -68,7 +68,7 @@
     import static java.util.Collections.emptySet;
     import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_CREATION_DATE;
     import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_UUID;
    -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
     import static org.elasticsearch.test.ClusterServiceUtils.setState;
     import static org.hamcrest.Matchers.allOf;
    @@ -120,13 +120,9 @@ public void setUp() throws Exception {
             state.nodes(DiscoveryNodes.builder().add(node1).add(node2).localNodeId(node1.getId()).masterNodeId(node1.getId()));
     
             shardId = new ShardId("index", UUID.randomUUID().toString(), 0);
    -        ShardRouting shardRouting = newShardRouting(
    -            shardId,
    -            node1.getId(),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    +        ShardRouting shardRouting = shardRoutingBuilder(shardId, node1.getId(), true, ShardRoutingState.INITIALIZING).withRecoverySource(
                 RecoverySource.EmptyStoreRecoverySource.INSTANCE
    -        );
    +        ).build();
     
             Settings indexSettings = indexSettings(IndexVersion.current(), 1, 1).put(SETTING_INDEX_UUID, shardId.getIndex().getUUID())
                 .put(SETTING_CREATION_DATE, System.currentTimeMillis())
    @@ -181,7 +177,7 @@ private TransportResponseHandler get
                             }
     
                             @Override
    -                        public Executor executor(ThreadPool threadPool) {
    +                        public Executor executor() {
                                 return TransportResponseHandler.TRANSPORT_WORKER;
                             }
     
    diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
    index 735ae41558240..7ee4d2d6bba9b 100644
    --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
    @@ -55,6 +55,7 @@
     import static org.hamcrest.CoreMatchers.not;
     import static org.hamcrest.CoreMatchers.nullValue;
     import static org.hamcrest.Matchers.contains;
    +import static org.hamcrest.Matchers.containsString;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.instanceOf;
     import static org.hamcrest.Matchers.notNullValue;
    @@ -516,6 +517,27 @@ public void testToValidateUpsertRequestWithVersion() {
             assertThat(updateRequest.validate().validationErrors(), contains("can't provide version in upsert request"));
         }
     
    +    public void testUpdatingRejectsLongIds() {
    +        String id = randomAlphaOfLength(511);
    +        UpdateRequest request = new UpdateRequest("index", id);
    +        request.doc("{}", XContentType.JSON);
    +        ActionRequestValidationException validate = request.validate();
    +        assertNull(validate);
    +
    +        id = randomAlphaOfLength(512);
    +        request = new UpdateRequest("index", id);
    +        request.doc("{}", XContentType.JSON);
    +        validate = request.validate();
    +        assertNull(validate);
    +
    +        id = randomAlphaOfLength(513);
    +        request = new UpdateRequest("index", id);
    +        request.doc("{}", XContentType.JSON);
    +        validate = request.validate();
    +        assertThat(validate, notNullValue());
    +        assertThat(validate.getMessage(), containsString("id [" + id + "] is too long, must be no longer than 512 bytes but was: 513"));
    +    }
    +
         public void testValidate() {
             {
                 UpdateRequest request = new UpdateRequest("index", "id");
    diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
    index f8875e41e3e3d..05c974ea9d4d3 100644
    --- a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
    @@ -62,7 +62,7 @@ public void testToXContent() throws IOException {
             }
             {
                 UpdateResponse updateResponse = new UpdateResponse(
    -                new ReplicationResponse.ShardInfo(10, 6),
    +                ReplicationResponse.ShardInfo.of(10, 6),
                     new ShardId("index", "index_uuid", 1),
                     "id",
                     3,
    @@ -94,7 +94,7 @@ public void testToXContent() throws IOException {
                 fields.put("isbn", new DocumentField("isbn", Collections.singletonList("ABC-123")));
     
                 UpdateResponse updateResponse = new UpdateResponse(
    -                new ReplicationResponse.ShardInfo(3, 2),
    +                ReplicationResponse.ShardInfo.of(3, 2),
                     new ShardId("books", "books_uuid", 2),
                     "1",
                     7,
    diff --git a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java
    index 5175fee7edceb..97c52ef2edc37 100644
    --- a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java
    +++ b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java
    @@ -26,6 +26,7 @@
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.common.util.concurrent.ThreadContext;
     import org.elasticsearch.env.Environment;
    +import org.elasticsearch.telemetry.metric.MeterRegistry;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.xcontent.XContentType;
    @@ -76,7 +77,7 @@ public void setUp() throws Exception {
                 .put("node.name", "test-" + getTestName())
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .build();
    -        threadPool = new ThreadPool(settings);
    +        threadPool = new ThreadPool(settings, MeterRegistry.NOOP);
             client = buildClient(settings, ACTIONS);
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java
    index 0100c7cab5ba4..9bda2f064072c 100644
    --- a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java
    +++ b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java
    @@ -12,20 +12,22 @@
     import org.elasticsearch.action.ActionRequest;
     import org.elasticsearch.action.ActionResponse;
     import org.elasticsearch.action.ActionType;
    +import org.elasticsearch.action.RemoteClusterActionType;
    +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
    +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
    +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
     import org.elasticsearch.action.bulk.BulkRequest;
     import org.elasticsearch.action.search.ClearScrollRequest;
     import org.elasticsearch.action.search.SearchRequest;
    +import org.elasticsearch.action.support.PlainActionFuture;
     import org.elasticsearch.common.util.concurrent.EsExecutors;
     import org.elasticsearch.tasks.TaskId;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.client.NoOpClient;
    +import org.elasticsearch.transport.TransportResponse;
     
     import java.util.concurrent.Executor;
     
    -import static org.hamcrest.Matchers.instanceOf;
    -import static org.hamcrest.Matchers.is;
    -import static org.mockito.Mockito.mock;
    -
     public class ParentTaskAssigningClientTests extends ESTestCase {
         public void testSetsParentId() {
             TaskId[] parentTaskId = new TaskId[] { new TaskId(randomAlphaOfLength(3), randomLong()) };
    @@ -65,15 +67,31 @@ public void testRemoteClientIsAlsoAParentAssigningClient() {
             try (var threadPool = createThreadPool()) {
                 final var mockClient = new NoOpClient(threadPool) {
                     @Override
    -                public Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    -                    return mock(Client.class);
    +                public RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) {
    +                    return new RemoteClusterClient() {
    +                        @Override
    +                        public  void execute(
    +                            RemoteClusterActionType action,
    +                            Request request,
    +                            ActionListener listener
    +                        ) {
    +                            assertSame(parentTaskId, request.getParentTask());
    +                            listener.onFailure(new UnsupportedOperationException("fake remote-cluster client"));
    +                        }
    +                    };
                     }
                 };
     
                 final var client = new ParentTaskAssigningClient(mockClient, parentTaskId);
    -            assertThat(
    -                client.getRemoteClusterClient("remote-cluster", EsExecutors.DIRECT_EXECUTOR_SERVICE),
    -                is(instanceOf(ParentTaskAssigningClient.class))
    +            final var remoteClusterClient = client.getRemoteClusterClient("remote-cluster", EsExecutors.DIRECT_EXECUTOR_SERVICE);
    +            assertEquals(
    +                "fake remote-cluster client",
    +                expectThrows(
    +                    UnsupportedOperationException.class,
    +                    () -> PlainActionFuture.get(
    +                        fut -> remoteClusterClient.execute(ClusterStateAction.REMOTE_TYPE, new ClusterStateRequest(), fut)
    +                    )
    +                ).getMessage()
                 );
             }
         }
    diff --git a/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java
    index c051b8314d7b3..9aea310180410 100644
    --- a/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java
    +++ b/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java
    @@ -16,7 +16,6 @@
     import org.elasticsearch.action.support.TransportAction;
     import org.elasticsearch.client.internal.AbstractClientHeadersTestCase;
     import org.elasticsearch.client.internal.Client;
    -import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.tasks.TaskManager;
    @@ -24,7 +23,6 @@
     
     import java.util.Collections;
     import java.util.HashMap;
    -import java.util.List;
     
     import static org.mockito.Mockito.mock;
     
    @@ -38,14 +36,7 @@ protected Client buildClient(Settings headersSettings, ActionType[] testedAct
             TaskManager taskManager = new TaskManager(settings, threadPool, Collections.emptySet());
             Actions actions = new Actions(testedActions, taskManager);
             NodeClient client = new NodeClient(settings, threadPool);
    -        client.initialize(
    -            actions,
    -            taskManager,
    -            () -> "test",
    -            mock(Transport.Connection.class),
    -            null,
    -            new NamedWriteableRegistry(List.of())
    -        );
    +        client.initialize(actions, taskManager, () -> "test", mock(Transport.Connection.class), null);
             return client;
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
    index 75439578448a4..5e122c4050b6c 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
    @@ -36,38 +36,38 @@
     public class DiskUsageTests extends ESTestCase {
         public void testDiskUsageCalc() {
             DiskUsage du = new DiskUsage("node1", "n1", "random", 100, 40);
    -        assertThat(du.getFreeDiskAsPercentage(), equalTo(40.0));
    -        assertThat(du.getUsedDiskAsPercentage(), equalTo(100.0 - 40.0));
    -        assertThat(du.getFreeBytes(), equalTo(40L));
    -        assertThat(du.getUsedBytes(), equalTo(60L));
    -        assertThat(du.getTotalBytes(), equalTo(100L));
    +        assertThat(du.freeDiskAsPercentage(), equalTo(40.0));
    +        assertThat(du.usedDiskAsPercentage(), equalTo(100.0 - 40.0));
    +        assertThat(du.freeBytes(), equalTo(40L));
    +        assertThat(du.usedBytes(), equalTo(60L));
    +        assertThat(du.totalBytes(), equalTo(100L));
     
             DiskUsage du2 = new DiskUsage("node1", "n1", "random", 100, 55);
    -        assertThat(du2.getFreeDiskAsPercentage(), equalTo(55.0));
    -        assertThat(du2.getUsedDiskAsPercentage(), equalTo(45.0));
    -        assertThat(du2.getFreeBytes(), equalTo(55L));
    -        assertThat(du2.getUsedBytes(), equalTo(45L));
    -        assertThat(du2.getTotalBytes(), equalTo(100L));
    +        assertThat(du2.freeDiskAsPercentage(), equalTo(55.0));
    +        assertThat(du2.usedDiskAsPercentage(), equalTo(45.0));
    +        assertThat(du2.freeBytes(), equalTo(55L));
    +        assertThat(du2.usedBytes(), equalTo(45L));
    +        assertThat(du2.totalBytes(), equalTo(100L));
     
             // Test that DiskUsage handles invalid numbers, as reported by some
             // filesystems (ZFS & NTFS)
             DiskUsage du3 = new DiskUsage("node1", "n1", "random", 100, 101);
    -        assertThat(du3.getFreeDiskAsPercentage(), equalTo(101.0));
    -        assertThat(du3.getFreeBytes(), equalTo(101L));
    -        assertThat(du3.getUsedBytes(), equalTo(-1L));
    -        assertThat(du3.getTotalBytes(), equalTo(100L));
    +        assertThat(du3.freeDiskAsPercentage(), equalTo(101.0));
    +        assertThat(du3.freeBytes(), equalTo(101L));
    +        assertThat(du3.usedBytes(), equalTo(-1L));
    +        assertThat(du3.totalBytes(), equalTo(100L));
     
             DiskUsage du4 = new DiskUsage("node1", "n1", "random", -1, -1);
    -        assertThat(du4.getFreeDiskAsPercentage(), equalTo(100.0));
    -        assertThat(du4.getFreeBytes(), equalTo(-1L));
    -        assertThat(du4.getUsedBytes(), equalTo(0L));
    -        assertThat(du4.getTotalBytes(), equalTo(-1L));
    +        assertThat(du4.freeDiskAsPercentage(), equalTo(100.0));
    +        assertThat(du4.freeBytes(), equalTo(-1L));
    +        assertThat(du4.usedBytes(), equalTo(0L));
    +        assertThat(du4.totalBytes(), equalTo(-1L));
     
             DiskUsage du5 = new DiskUsage("node1", "n1", "random", 0, 0);
    -        assertThat(du5.getFreeDiskAsPercentage(), equalTo(100.0));
    -        assertThat(du5.getFreeBytes(), equalTo(0L));
    -        assertThat(du5.getUsedBytes(), equalTo(0L));
    -        assertThat(du5.getTotalBytes(), equalTo(0L));
    +        assertThat(du5.freeDiskAsPercentage(), equalTo(100.0));
    +        assertThat(du5.freeBytes(), equalTo(0L));
    +        assertThat(du5.usedBytes(), equalTo(0L));
    +        assertThat(du5.totalBytes(), equalTo(0L));
         }
     
         public void testRandomDiskUsage() {
    @@ -77,17 +77,17 @@ public void testRandomDiskUsage() {
                 long free = between(Integer.MIN_VALUE, Integer.MAX_VALUE);
                 DiskUsage du = new DiskUsage("random", "random", "random", total, free);
                 if (total == 0) {
    -                assertThat(du.getFreeBytes(), equalTo(free));
    -                assertThat(du.getTotalBytes(), equalTo(0L));
    -                assertThat(du.getUsedBytes(), equalTo(-free));
    -                assertThat(du.getFreeDiskAsPercentage(), equalTo(100.0));
    -                assertThat(du.getUsedDiskAsPercentage(), equalTo(0.0));
    +                assertThat(du.freeBytes(), equalTo(free));
    +                assertThat(du.totalBytes(), equalTo(0L));
    +                assertThat(du.usedBytes(), equalTo(-free));
    +                assertThat(du.freeDiskAsPercentage(), equalTo(100.0));
    +                assertThat(du.usedDiskAsPercentage(), equalTo(0.0));
                 } else {
    -                assertThat(du.getFreeBytes(), equalTo(free));
    -                assertThat(du.getTotalBytes(), equalTo(total));
    -                assertThat(du.getUsedBytes(), equalTo(total - free));
    -                assertThat(du.getFreeDiskAsPercentage(), equalTo(100.0 * free / total));
    -                assertThat(du.getUsedDiskAsPercentage(), equalTo(100.0 - (100.0 * free / total)));
    +                assertThat(du.freeBytes(), equalTo(free));
    +                assertThat(du.totalBytes(), equalTo(total));
    +                assertThat(du.usedBytes(), equalTo(total - free));
    +                assertThat(du.freeDiskAsPercentage(), equalTo(100.0 * free / total));
    +                assertThat(du.usedDiskAsPercentage(), equalTo(100.0 - (100.0 * free / total)));
                 }
             }
         }
    @@ -347,9 +347,9 @@ public void testLeastAndMostAvailableDiskSpaceSomeInvalidValues() {
         private void assertDiskUsage(DiskUsage usage, FsInfo.Path path) {
             assertNotNull(usage);
             assertNotNull(path);
    -        assertEquals(usage.toString(), usage.getPath(), path.getPath());
    -        assertEquals(usage.toString(), usage.getTotalBytes(), path.getTotal().getBytes());
    -        assertEquals(usage.toString(), usage.getFreeBytes(), path.getAvailable().getBytes());
    +        assertEquals(usage.toString(), usage.path(), path.getPath());
    +        assertEquals(usage.toString(), usage.totalBytes(), path.getTotal().getBytes());
    +        assertEquals(usage.toString(), usage.freeBytes(), path.getAvailable().getBytes());
     
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java
    index 395dde29597d3..ae557b1b418da 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java
    @@ -42,6 +42,7 @@
     
     import static java.util.Collections.emptyList;
     import static java.util.Collections.emptySet;
    +import static java.util.Collections.singleton;
     import static java.util.Collections.singletonList;
     import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.BOOTSTRAP_PLACEHOLDER_PREFIX;
     import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING;
    @@ -91,6 +92,7 @@ public void testScheduling() {
                         clusterState,
                         emptyList(),
                         emptyList(),
    +                    emptySet(),
                         0L,
                         electionStrategy,
                         new StatusInfo(HEALTHY, "healthy-info"),
    @@ -189,6 +191,7 @@ public void testDescriptionOnMasterIneligibleNodes() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     15L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -207,6 +210,7 @@ public void testDescriptionOnMasterIneligibleNodes() {
                     clusterState,
                     singletonList(otherAddress),
                     emptyList(),
    +                emptySet(),
                     16L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -226,6 +230,7 @@ public void testDescriptionOnMasterIneligibleNodes() {
                     clusterState,
                     emptyList(),
                     singletonList(otherNode),
    +                emptySet(),
                     17L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -253,6 +258,7 @@ public void testDescriptionOnUnhealthyNodes() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     15L,
                     electionStrategy,
                     new StatusInfo(UNHEALTHY, "unhealthy-info"),
    @@ -273,6 +279,7 @@ public void testDescriptionOnUnhealthyNodes() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     15L,
                     electionStrategy,
                     new StatusInfo(UNHEALTHY, "unhealthy-info"),
    @@ -296,6 +303,7 @@ public void testDescriptionBeforeBootstrapping() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     1L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -317,6 +325,7 @@ public void testDescriptionBeforeBootstrapping() {
                     clusterState,
                     singletonList(otherAddress),
                     emptyList(),
    +                emptySet(),
                     2L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -340,6 +349,7 @@ public void testDescriptionBeforeBootstrapping() {
                     clusterState,
                     emptyList(),
                     singletonList(otherNode),
    +                emptySet(),
                     3L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -362,6 +372,7 @@ public void testDescriptionBeforeBootstrapping() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     4L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -414,6 +425,7 @@ public void testDescriptionAfterDetachCluster() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -435,6 +447,7 @@ public void testDescriptionAfterDetachCluster() {
                     clusterState,
                     singletonList(otherAddress),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -458,6 +471,7 @@ public void testDescriptionAfterDetachCluster() {
                     clusterState,
                     emptyList(),
                     singletonList(otherNode),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -481,6 +495,7 @@ public void testDescriptionAfterDetachCluster() {
                     clusterState,
                     emptyList(),
                     singletonList(yetAnotherNode),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -510,6 +525,7 @@ public void testDescriptionAfterBootstrapping() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -531,6 +547,7 @@ public void testDescriptionAfterBootstrapping() {
                     clusterState,
                     singletonList(otherAddress),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -554,6 +571,7 @@ public void testDescriptionAfterBootstrapping() {
                     clusterState,
                     emptyList(),
                     singletonList(otherNode),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -577,6 +595,7 @@ public void testDescriptionAfterBootstrapping() {
                     clusterState,
                     emptyList(),
                     singletonList(yetAnotherNode),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -593,12 +612,39 @@ public void testDescriptionAfterBootstrapping() {
                 )
             );
     
    +        final DiscoveryNode recentMaster = makeDiscoveryNode("recentMaster");
    +        assertThat(
    +            new ClusterFormationState(
    +                Settings.EMPTY,
    +                clusterState,
    +                emptyList(),
    +                singletonList(yetAnotherNode),
    +                singleton(recentMaster),
    +                0L,
    +                electionStrategy,
    +                new StatusInfo(HEALTHY, "healthy-info"),
    +                emptyList()
    +            ).getDescription(),
    +            is(
    +                "master not discovered or elected yet, an election requires a node with id [otherNode], "
    +                    + "have only discovered non-quorum ["
    +                    + noAttr(yetAnotherNode)
    +                    + "] who claim current master to be ["
    +                    + noAttr(recentMaster)
    +                    + "]; "
    +                    + "discovery will continue using [] from hosts providers and ["
    +                    + noAttr(localNode)
    +                    + "] from last-known cluster state; node term 0, last-accepted version 0 in term 0"
    +            )
    +        );
    +
             assertThat(
                 new ClusterFormationState(
                     Settings.EMPTY,
                     state(localNode, "n1", "n2"),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -619,6 +665,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, "n1", "n2", "n3"),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -639,6 +686,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, "n1", "n2", BOOTSTRAP_PLACEHOLDER_PREFIX + "n3"),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -659,6 +707,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, "n1", "n2", "n3", "n4"),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -679,6 +728,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, "n1", "n2", "n3", "n4", "n5"),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -699,6 +749,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, "n1", "n2", "n3", "n4", BOOTSTRAP_PLACEHOLDER_PREFIX + "n5"),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -719,6 +770,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, "n1", "n2", "n3", BOOTSTRAP_PLACEHOLDER_PREFIX + "n4", BOOTSTRAP_PLACEHOLDER_PREFIX + "n5"),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -739,6 +791,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, new String[] { "n1" }, new String[] { "n1" }),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -759,6 +812,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, new String[] { "n1" }, new String[] { "n2" }),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -779,6 +833,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, new String[] { "n1" }, new String[] { "n2", "n3" }),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -799,6 +854,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, new String[] { "n1" }, new String[] { "n2", "n3", "n4" }),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -841,6 +897,7 @@ public void testDescriptionAfterBootstrapping() {
                     stateWithOtherNodes,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -875,6 +932,7 @@ public void testDescriptionAfterBootstrapping() {
                     state(localNode, GatewayMetaState.STALE_STATE_CONFIG_NODE_ID),
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -910,6 +968,7 @@ public void testJoinStatusReporting() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -924,6 +983,7 @@ public void testJoinStatusReporting() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -970,6 +1030,7 @@ public void testJoinStatusReporting() {
                     clusterState,
                     emptyList(),
                     emptyList(),
    +                emptySet(),
                     0L,
                     electionStrategy,
                     new StatusInfo(HEALTHY, "healthy-info"),
    @@ -987,6 +1048,10 @@ public void testClusterFormationStateSerialization() {
                 DiscoveryNodeUtils.create(UUID.randomUUID().toString()),
                 DiscoveryNodeUtils.create(UUID.randomUUID().toString())
             );
    +        Set mastersOfPeers = Set.of(
    +            DiscoveryNodeUtils.create(UUID.randomUUID().toString()),
    +            DiscoveryNodeUtils.create(UUID.randomUUID().toString())
    +        );
             List joinStatuses = List.of(
                 new JoinStatus(
                     DiscoveryNodeUtils.create(UUID.randomUUID().toString()),
    @@ -1001,6 +1066,7 @@ public void testClusterFormationStateSerialization() {
                 state(localNode, new String[] { "n1" }, new String[] { "n2", "n3", "n4" }),
                 resolvedAddresses,
                 foundPeers,
    +            mastersOfPeers,
                 0L,
                 electionStrategy,
                 new StatusInfo(HEALTHY, "healthy-info"),
    @@ -1035,6 +1101,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState
             final DiscoveryNode localNode = originalClusterFormationState.localNode();
             List resolvedAddresses = originalClusterFormationState.resolvedAddresses();
             List foundPeers = originalClusterFormationState.foundPeers();
    +        Set mastersOfPeers = originalClusterFormationState.mastersOfPeers();
             long currentTerm = originalClusterFormationState.currentTerm();
             StatusInfo statusInfo = originalClusterFormationState.statusInfo();
             List joinStatuses = originalClusterFormationState.inFlightJoinStatuses();
    @@ -1043,13 +1110,14 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState
                 originalClusterFormationState.lastAcceptedConfiguration(),
                 originalClusterFormationState.lastCommittedConfiguration()
             );
    -        switch (randomIntBetween(1, 5)) {
    +        switch (randomIntBetween(1, 6)) {
                 case 1 -> {
                     return new ClusterFormationState(
                         settings,
                         clusterState,
                         resolvedAddresses,
                         foundPeers,
    +                    mastersOfPeers,
                         currentTerm + 1,
                         electionStrategy,
                         statusInfo,
    @@ -1064,6 +1132,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState
                         clusterState,
                         resolvedAddresses,
                         newFoundPeers,
    +                    mastersOfPeers,
                         currentTerm,
                         electionStrategy,
                         statusInfo,
    @@ -1085,6 +1154,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState
                         clusterState,
                         resolvedAddresses,
                         foundPeers,
    +                    mastersOfPeers,
                         currentTerm,
                         electionStrategy,
                         statusInfo,
    @@ -1098,6 +1168,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState
                         clusterState,
                         resolvedAddresses,
                         foundPeers,
    +                    mastersOfPeers,
                         currentTerm,
                         electionStrategy,
                         newStatusInfo,
    @@ -1110,6 +1181,26 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState
                         clusterState,
                         resolvedAddresses,
                         foundPeers,
    +                    mastersOfPeers,
    +                    currentTerm,
    +                    electionStrategy,
    +                    statusInfo,
    +                    joinStatuses
    +                );
    +            }
    +            case 6 -> {
    +                List newMastersOfPeers = new ArrayList<>(mastersOfPeers);
    +                if (mastersOfPeers.isEmpty() || randomBoolean()) {
    +                    newMastersOfPeers.add(DiscoveryNodeUtils.create(UUID.randomUUID().toString()));
    +                } else {
    +                    newMastersOfPeers.remove(0);
    +                }
    +                return new ClusterFormationState(
    +                    settings,
    +                    clusterState,
    +                    resolvedAddresses,
    +                    foundPeers,
    +                    Set.copyOf(newMastersOfPeers),
                         currentTerm,
                         electionStrategy,
                         statusInfo,
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java
    index 0d93dfb3d7f62..2ad0f18de277f 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java
    @@ -993,6 +993,7 @@ private ClusterFormationFailureHelper.ClusterFormationState getClusterFormationS
                 hasDiscoveredAllNodes
                     ? allMasterEligibleNodes
                     : randomSubsetOf(randomInt(allMasterEligibleNodes.size() - 1), allMasterEligibleNodes),
    +            Collections.emptySet(),
                 randomLong(),
                 hasDiscoveredQuorum,
                 new StatusInfo(randomFrom(StatusInfo.Status.HEALTHY, StatusInfo.Status.UNHEALTHY), randomAlphaOfLength(20)),
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java
    index ee5b8b652d2d9..d634d1f5818ae 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java
    @@ -29,7 +29,6 @@
     import org.elasticsearch.test.EqualsHashCodeTestUtils.CopyFunction;
     import org.elasticsearch.test.transport.CapturingTransport;
     import org.elasticsearch.test.transport.MockTransport;
    -import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.transport.AbstractSimpleTransportTestCase;
     import org.elasticsearch.transport.ConnectTransportException;
     import org.elasticsearch.transport.TransportException;
    @@ -547,7 +546,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req
                 new FollowerCheckRequest(leaderTerm, leader),
                 new TransportResponseHandler.Empty() {
                     @Override
    -                public Executor executor(ThreadPool threadPool) {
    +                public Executor executor() {
                         return TransportResponseHandler.TRANSPORT_WORKER;
                     }
     
    @@ -636,7 +635,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req
                     new FollowerCheckRequest(leaderTerm, leader),
                     new TransportResponseHandler.Empty() {
                         @Override
    -                    public Executor executor(ThreadPool threadPool) {
    +                    public Executor executor() {
                             return TransportResponseHandler.TRANSPORT_WORKER;
                         }
     
    @@ -703,7 +702,7 @@ public void handleException(TransportException exp) {
                     new FollowerCheckRequest(term, leader),
                     new TransportResponseHandler.Empty() {
                         @Override
    -                    public Executor executor(ThreadPool threadPool) {
    +                    public Executor executor() {
                             return TransportResponseHandler.TRANSPORT_WORKER;
                         }
     
    @@ -739,7 +738,7 @@ public void handleException(TransportException exp) {
                     new FollowerCheckRequest(term, leader),
                     new TransportResponseHandler.Empty() {
                         @Override
    -                    public Executor executor(ThreadPool threadPool) {
    +                    public Executor executor() {
                             return TransportResponseHandler.TRANSPORT_WORKER;
                         }
     
    @@ -828,7 +827,7 @@ private static class ExpectsSuccess extends TransportResponseHandler.Empty {
             private final AtomicBoolean responseReceived = new AtomicBoolean();
     
             @Override
    -        public Executor executor(ThreadPool threadPool) {
    +        public Executor executor() {
                 return TransportResponseHandler.TRANSPORT_WORKER;
             }
     
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java
    index f69596be8ce65..f1757973b7b9f 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java
    @@ -12,7 +12,7 @@
     import org.elasticsearch.cluster.node.DiscoveryNode;
     import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
     import org.elasticsearch.common.ReferenceDocs;
    -import org.elasticsearch.common.logging.ChunkedLoggingStreamTests;
    +import org.elasticsearch.common.logging.ChunkedLoggingStreamTestUtils;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue;
     import org.elasticsearch.core.TimeValue;
    @@ -259,7 +259,7 @@ public void testHotThreadsChunkedLoggingEncoding() {
             final var expectedBody = randomUnicodeOfLengthBetween(1, 20000);
             assertEquals(
                 expectedBody,
    -            ChunkedLoggingStreamTests.getDecodedLoggedBody(
    +            ChunkedLoggingStreamTestUtils.getDecodedLoggedBody(
                     LogManager.getLogger(LOGGER_NAME),
                     Level.DEBUG,
                     "hot threads from node ["
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java
    index 93ebf894771e3..6585cd8f9bc13 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java
    @@ -26,7 +26,6 @@
     import org.elasticsearch.test.EqualsHashCodeTestUtils.CopyFunction;
     import org.elasticsearch.test.transport.CapturingTransport;
     import org.elasticsearch.test.transport.MockTransport;
    -import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.transport.AbstractSimpleTransportTestCase;
     import org.elasticsearch.transport.ConnectTransportException;
     import org.elasticsearch.transport.ReceiveTimeoutTransportException;
    @@ -486,6 +485,7 @@ public void testLeaderBehaviour() {
             );
             transportService.start();
             transportService.acceptIncomingRequests();
    +        final var executor = transportService.getThreadPool().generic();
     
             final LeaderChecker leaderChecker = new LeaderChecker(
                 settings,
    @@ -503,7 +503,7 @@ public void testLeaderBehaviour() {
             {
                 leaderChecker.setCurrentNodes(discoveryNodes);
     
    -            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler();
    +            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler(executor);
                 transportService.sendRequest(localNode, LEADER_CHECK_ACTION_NAME, new LeaderCheckRequest(otherNode), handler);
                 deterministicTaskQueue.runAllTasks();
     
    @@ -518,7 +518,7 @@ public void testLeaderBehaviour() {
             {
                 leaderChecker.setCurrentNodes(discoveryNodes);
     
    -            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler();
    +            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler(executor);
                 transportService.sendRequest(localNode, LEADER_CHECK_ACTION_NAME, new LeaderCheckRequest(otherNode), handler);
                 deterministicTaskQueue.runAllTasks();
     
    @@ -531,7 +531,7 @@ public void testLeaderBehaviour() {
             {
                 leaderChecker.setCurrentNodes(DiscoveryNodes.builder(discoveryNodes).add(otherNode).build());
     
    -            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler();
    +            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler(executor);
                 transportService.sendRequest(localNode, LEADER_CHECK_ACTION_NAME, new LeaderCheckRequest(otherNode), handler);
                 deterministicTaskQueue.runAllTasks();
     
    @@ -542,7 +542,7 @@ public void testLeaderBehaviour() {
             {
                 leaderChecker.setCurrentNodes(DiscoveryNodes.builder(discoveryNodes).add(otherNode).masterNodeId(null).build());
     
    -            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler();
    +            final CapturingTransportResponseHandler handler = new CapturingTransportResponseHandler(executor);
                 transportService.sendRequest(localNode, LEADER_CHECK_ACTION_NAME, new LeaderCheckRequest(otherNode), handler);
                 deterministicTaskQueue.runAllTasks();
     
    @@ -557,6 +557,11 @@ private static class CapturingTransportResponseHandler implements TransportRespo
     
             TransportException transportException;
             boolean successfulResponseReceived;
    +        final Executor executor;
    +
    +        private CapturingTransportResponseHandler(Executor executor) {
    +            this.executor = executor;
    +        }
     
             @Override
             public void handleResponse(TransportResponse.Empty response) {
    @@ -569,8 +574,8 @@ public void handleException(TransportException exp) {
             }
     
             @Override
    -        public Executor executor(ThreadPool threadPool) {
    -            return threadPool.generic();
    +        public Executor executor() {
    +            return executor;
             }
     
             @Override
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java
    index 1a9d068da12ad..b8dfdd8e91231 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java
    @@ -195,7 +195,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req
                         );
                     } else if (action.equals(JoinValidationService.JOIN_VALIDATE_ACTION_NAME)
                         || action.equals(JoinHelper.JOIN_PING_ACTION_NAME)) {
    -                        handleResponse(requestId, new TransportResponse.Empty());
    +                        handleResponse(requestId, TransportResponse.Empty.INSTANCE);
                         } else {
                             super.onSendRequest(requestId, action, request, destination);
                         }
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java
    index 18385b1d7ad44..77c59fe9e8209 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java
    @@ -43,9 +43,9 @@
     import java.util.Map;
     import java.util.UUID;
     
    +import static org.hamcrest.Matchers.containsInAnyOrder;
     import static org.hamcrest.Matchers.emptyOrNullString;
     import static org.hamcrest.Matchers.equalTo;
    -import static org.hamcrest.Matchers.is;
     import static org.hamcrest.Matchers.not;
     import static org.mockito.Mockito.mock;
     import static org.mockito.Mockito.when;
    @@ -126,9 +126,14 @@ public void testGetHealthIndicatorResultNotGreenVerboseTrue() throws Exception {
             assertThat(nodeIdToClusterFormationMap.get(node2.getId()), equalTo(node2ClusterFormation));
             assertThat(nodeIdToNodeNameMap.get(node1.getId()), equalTo(node1.getName()));
             assertThat(nodeIdToNodeNameMap.get(node2.getId()), equalTo(node2.getName()));
    -        List diagnosis = result.diagnosisList();
    -        assertThat(diagnosis.size(), equalTo(1));
    -        assertThat(diagnosis.get(0), is(StableMasterHealthIndicatorService.CONTACT_SUPPORT));
    +        assertThat(
    +            result.diagnosisList(),
    +            containsInAnyOrder(
    +                StableMasterHealthIndicatorService.CONTACT_SUPPORT,
    +                StableMasterHealthIndicatorService.TROUBLESHOOT_DISCOVERY,
    +                StableMasterHealthIndicatorService.TROUBLESHOOT_UNSTABLE_CLUSTER
    +            )
    +        );
         }
     
         public void testGetHealthIndicatorResultNotGreenVerboseFalse() throws Exception {
    diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollectorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollectorTests.java
    index c430771342669..d1b81c4d9c601 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollectorTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/StatefulPreVoteCollectorTests.java
    @@ -19,7 +19,6 @@
     import org.elasticsearch.monitor.StatusInfo;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.transport.MockTransport;
    -import org.elasticsearch.threadpool.ThreadPool;
     import org.elasticsearch.transport.ConnectTransportException;
     import org.elasticsearch.transport.RemoteTransportException;
     import org.elasticsearch.transport.TransportException;
    @@ -314,7 +313,7 @@ public PreVoteResponse read(StreamInput in) throws IOException {
                     }
     
                     @Override
    -                public Executor executor(ThreadPool threadPool) {
    +                public Executor executor() {
                         return TransportResponseHandler.TRANSPORT_WORKER;
                     }
     
    diff --git a/server/src/test/java/org/elasticsearch/cluster/desirednodes/DesiredNodesSettingsValidatorTests.java b/server/src/test/java/org/elasticsearch/cluster/desirednodes/DesiredNodesSettingsValidatorTests.java
    deleted file mode 100644
    index 819ec4b5266ac..0000000000000
    --- a/server/src/test/java/org/elasticsearch/cluster/desirednodes/DesiredNodesSettingsValidatorTests.java
    +++ /dev/null
    @@ -1,35 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.cluster.desirednodes;
    -
    -import org.elasticsearch.Version;
    -import org.elasticsearch.cluster.metadata.DesiredNode;
    -import org.elasticsearch.common.settings.Settings;
    -import org.elasticsearch.test.ESTestCase;
    -
    -import java.util.List;
    -
    -import static org.elasticsearch.cluster.metadata.DesiredNodesTestCase.randomDesiredNode;
    -import static org.hamcrest.Matchers.containsString;
    -import static org.hamcrest.Matchers.emptyArray;
    -import static org.hamcrest.Matchers.not;
    -
    -public class DesiredNodesSettingsValidatorTests extends ESTestCase {
    -    public void testNodeVersionValidation() {
    -        final List desiredNodes = List.of(randomDesiredNode(Version.CURRENT.previousMajor(), Settings.EMPTY));
    -
    -        final DesiredNodesSettingsValidator validator = new DesiredNodesSettingsValidator();
    -
    -        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> validator.accept(desiredNodes));
    -        assertThat(exception.getMessage(), containsString("Nodes with ids"));
    -        assertThat(exception.getMessage(), containsString("contain invalid settings"));
    -        assertThat(exception.getSuppressed(), not(emptyArray()));
    -        assertThat(exception.getSuppressed()[0].getMessage(), containsString("Illegal node version"));
    -    }
    -}
    diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeSerializationTests.java
    index 20f7298fffe0b..dacb96b146b11 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeSerializationTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeSerializationTests.java
    @@ -39,54 +39,41 @@ protected DesiredNode mutateInstance(DesiredNode instance) {
         }
     
         public static DesiredNode mutateDesiredNode(DesiredNode instance) {
    -        final var mutationBranch = randomInt(5);
    +        final var mutationBranch = randomInt(4);
             return switch (mutationBranch) {
                 case 0 -> new DesiredNode(
                     Settings.builder().put(instance.settings()).put(randomAlphaOfLength(10), randomInt()).build(),
                     instance.processors(),
                     instance.processorsRange(),
                     instance.memory(),
    -                instance.storage(),
    -                instance.version()
    +                instance.storage()
                 );
                 case 1 -> new DesiredNode(
                     instance.settings(),
                     randomValueOtherThan(instance.processors(), () -> Processors.of(randomDouble() + randomIntBetween(1, 128))),
                     null,
                     instance.memory(),
    -                instance.storage(),
    -                instance.version()
    +                instance.storage()
                 );
                 case 2 -> new DesiredNode(
                     instance.settings(),
                     randomValueOtherThan(instance.processorsRange(), DesiredNodesTestCase::randomProcessorRange),
                     instance.memory(),
    -                instance.storage(),
    -                instance.version()
    +                instance.storage()
                 );
                 case 3 -> new DesiredNode(
                     instance.settings(),
                     instance.processors(),
                     instance.processorsRange(),
                     ByteSizeValue.ofGb(randomValueOtherThan(instance.memory().getGb(), () -> (long) randomIntBetween(1, 128))),
    -                instance.storage(),
    -                instance.version()
    +                instance.storage()
                 );
                 case 4 -> new DesiredNode(
                     instance.settings(),
                     instance.processors(),
                     instance.processorsRange(),
                     instance.memory(),
    -                ByteSizeValue.ofGb(randomValueOtherThan(instance.storage().getGb(), () -> (long) randomIntBetween(1, 128))),
    -                instance.version()
    -            );
    -            case 5 -> new DesiredNode(
    -                instance.settings(),
    -                instance.processors(),
    -                instance.processorsRange(),
    -                instance.memory(),
    -                instance.storage(),
    -                instance.version().previousMajor()
    +                ByteSizeValue.ofGb(randomValueOtherThan(instance.storage().getGb(), () -> (long) randomIntBetween(1, 128)))
                 );
                 default -> throw new IllegalStateException("Unexpected value: " + mutationBranch);
             };
    diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java
    index 6e26fa26fc041..860f5171d3632 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java
    @@ -8,7 +8,6 @@
     
     package org.elasticsearch.cluster.metadata;
     
    -import org.elasticsearch.Version;
     import org.elasticsearch.cluster.node.DiscoveryNodeRole;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.common.unit.ByteSizeValue;
    @@ -38,7 +37,7 @@ public void testExternalIdIsRequired() {
     
             final IllegalArgumentException exception = expectThrows(
                 IllegalArgumentException.class,
    -            () -> new DesiredNode(settings.build(), 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT)
    +            () -> new DesiredNode(settings.build(), 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1))
             );
             assertThat(exception.getMessage(), is(equalTo("[node.name] or [node.external_id] is missing or empty")));
         }
    @@ -47,7 +46,7 @@ public void testExternalIdFallbacksToNodeName() {
             final String nodeName = randomAlphaOfLength(10);
             final Settings settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build();
     
    -        DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +        DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
             assertThat(desiredNode.externalId(), is(notNullValue()));
             assertThat(desiredNode.externalId(), is(equalTo(nodeName)));
         }
    @@ -57,7 +56,7 @@ public void testNumberOfProcessorsValidation() {
     
             expectThrows(
                 IllegalArgumentException.class,
    -            () -> new DesiredNode(settings, randomInvalidProcessor(), ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT)
    +            () -> new DesiredNode(settings, randomInvalidProcessor(), ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1))
             );
     
             // Processor ranges
    @@ -67,8 +66,7 @@ public void testNumberOfProcessorsValidation() {
                     settings,
                     new DesiredNode.ProcessorsRange(randomInvalidProcessor(), randomFrom(random(), null, 1.0)),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 )
             );
             expectThrows(
    @@ -77,8 +75,7 @@ public void testNumberOfProcessorsValidation() {
                     settings,
                     new DesiredNode.ProcessorsRange(randomDouble() + 0.1, randomInvalidProcessor()),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 )
             );
             expectThrows(
    @@ -87,8 +84,7 @@ public void testNumberOfProcessorsValidation() {
                     settings,
                     new DesiredNode.ProcessorsRange(randomInvalidProcessor(), randomInvalidProcessor()),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 )
             );
     
    @@ -100,8 +96,7 @@ public void testNumberOfProcessorsValidation() {
                     settings,
                     new DesiredNode.ProcessorsRange(lowerBound, upperBound),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 )
             );
         }
    @@ -110,7 +105,7 @@ public void testHasMasterRole() {
             {
                 final Settings settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build();
     
    -            DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +            DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
                 assertTrue(desiredNode.hasMasterRole());
             }
     
    @@ -120,7 +115,7 @@ public void testHasMasterRole() {
                     .put(NODE_ROLES_SETTING.getKey(), "master")
                     .build();
     
    -            DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +            DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
                 assertTrue(desiredNode.hasMasterRole());
             }
     
    @@ -130,7 +125,7 @@ public void testHasMasterRole() {
                     .put(NODE_ROLES_SETTING.getKey(), "data_hot")
                     .build();
     
    -            DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +            DesiredNode desiredNode = new DesiredNode(settings, 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
                 assertFalse(desiredNode.hasMasterRole());
             }
         }
    @@ -143,7 +138,7 @@ public void testGetRoles() {
                 settings.put(NODE_ROLES_SETTING.getKey(), role.roleName());
             }
     
    -        final var desiredNode = new DesiredNode(settings.build(), 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +        final var desiredNode = new DesiredNode(settings.build(), 1, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
     
             if (role != null) {
                 assertThat(desiredNode.getRoles(), hasSize(1));
    @@ -161,8 +156,7 @@ public void testNodeCPUsRoundUp() {
                     settings,
                     new DesiredNode.ProcessorsRange(0.4, 1.2),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 );
     
                 assertThat(desiredNode.minProcessors().count(), is(equalTo(0.4)));
    @@ -172,7 +166,7 @@ public void testNodeCPUsRoundUp() {
             }
     
             {
    -            final var desiredNode = new DesiredNode(settings, 1.2, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +            final var desiredNode = new DesiredNode(settings, 1.2, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
     
                 assertThat(desiredNode.minProcessors().count(), is(equalTo(1.2)));
                 assertThat(desiredNode.roundedDownMinProcessors(), is(equalTo(1)));
    @@ -181,7 +175,7 @@ public void testNodeCPUsRoundUp() {
             }
     
             {
    -            final var desiredNode = new DesiredNode(settings, 1024, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +            final var desiredNode = new DesiredNode(settings, 1024, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
     
                 assertThat(desiredNode.minProcessors().count(), is(equalTo(1024.0)));
                 assertThat(desiredNode.roundedDownMinProcessors(), is(equalTo(1024)));
    @@ -198,8 +192,7 @@ public void testDesiredNodeHasRangeFloatProcessors() {
                     settings,
                     new DesiredNode.ProcessorsRange(0.4, 1.2),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 );
                 assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true));
                 assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false));
    @@ -210,15 +203,14 @@ public void testDesiredNodeHasRangeFloatProcessors() {
                     settings,
                     randomIntBetween(0, 10) + randomDoubleBetween(0.00001, 0.99999, true),
                     ByteSizeValue.ofGb(1),
    -                ByteSizeValue.ofGb(1),
    -                Version.CURRENT
    +                ByteSizeValue.ofGb(1)
                 );
                 assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true));
                 assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false));
             }
     
             {
    -            final var desiredNode = new DesiredNode(settings, 2.0f, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1), Version.CURRENT);
    +            final var desiredNode = new DesiredNode(settings, 2.0f, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1));
                 assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true));
                 assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(true));
             }
    @@ -236,13 +228,12 @@ public void testEqualsOrProcessorsCloseTo() {
             final DesiredNode desiredNode1;
             final DesiredNode desiredNode2;
             if (randomBoolean()) {
    -            desiredNode1 = new DesiredNode(settings, processorCount, memory, storage, Version.CURRENT);
    +            desiredNode1 = new DesiredNode(settings, processorCount, memory, storage);
                 desiredNode2 = new DesiredNode(
                     settings,
                     isEqualOrCloseTo ? (float) processorCount : processorCount + maxDelta,
                     memory,
    -                storage,
    -                Version.CURRENT
    +                storage
                 );
             } else {
                 final double desiredNodes1Min = processorCount;
    @@ -268,8 +259,8 @@ public void testEqualsOrProcessorsCloseTo() {
                     desiredNodes2Max
                 );
     
    -            desiredNode1 = new DesiredNode(settings, desiredNodes1ProcessorsRange, memory, storage, Version.CURRENT);
    -            desiredNode2 = new DesiredNode(settings, desiredNodes2ProcessorsRange, memory, storage, Version.CURRENT);
    +            desiredNode1 = new DesiredNode(settings, desiredNodes1ProcessorsRange, memory, storage);
    +            desiredNode2 = new DesiredNode(settings, desiredNodes2ProcessorsRange, memory, storage);
             }
     
             assertThat(desiredNode1.equalsWithProcessorsCloseTo(desiredNode2), is(isEqualOrCloseTo));
    diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTestCase.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTestCase.java
    index c7d44238e389b..d99d787a1d243 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTestCase.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTestCase.java
    @@ -8,7 +8,6 @@
     
     package org.elasticsearch.cluster.metadata;
     
    -import org.elasticsearch.Version;
     import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest;
     import org.elasticsearch.cluster.ClusterName;
     import org.elasticsearch.cluster.ClusterState;
    @@ -44,38 +43,32 @@ public static DesiredNodeWithStatus randomDesiredNodeWithStatus() {
         }
     
         public static DesiredNode randomDesiredNode() {
    -        return randomDesiredNode(Version.CURRENT, Settings.EMPTY);
    +        return randomDesiredNode(Settings.EMPTY);
         }
     
         public static DesiredNode randomDesiredNode(Settings settings) {
    -        return randomDesiredNode(Version.CURRENT, settings);
    -    }
    -
    -    public static DesiredNode randomDesiredNode(Version version, Settings settings) {
             if (randomBoolean()) {
    -            return randomDesiredNode(version, settings, randomProcessorRange());
    +            return randomDesiredNode(settings, randomProcessorRange());
             } else {
    -            return randomDesiredNode(version, settings, randomNumberOfProcessors());
    +            return randomDesiredNode(settings, randomNumberOfProcessors());
             }
         }
     
    -    public static DesiredNode randomDesiredNode(Version version, Settings settings, double processors) {
    +    public static DesiredNode randomDesiredNode(Settings settings, double processors) {
             return new DesiredNode(
                 addExternalIdIfMissing(settings),
                 processors,
                 ByteSizeValue.ofGb(randomIntBetween(1, 1024)),
    -            ByteSizeValue.ofTb(randomIntBetween(1, 40)),
    -            version
    +            ByteSizeValue.ofTb(randomIntBetween(1, 40))
             );
         }
     
    -    public static DesiredNode randomDesiredNode(Version version, Settings settings, DesiredNode.ProcessorsRange processorsRange) {
    +    public static DesiredNode randomDesiredNode(Settings settings, DesiredNode.ProcessorsRange processorsRange) {
             return new DesiredNode(
                 addExternalIdIfMissing(settings),
                 processorsRange,
                 ByteSizeValue.ofGb(randomIntBetween(1, 1024)),
    -            ByteSizeValue.ofTb(randomIntBetween(1, 40)),
    -            version
    +            ByteSizeValue.ofTb(randomIntBetween(1, 40))
             );
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTests.java
    index 41c2e7674bce9..e24f1eb84543b 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesTests.java
    @@ -8,7 +8,6 @@
     
     package org.elasticsearch.cluster.metadata;
     
    -import org.elasticsearch.Version;
     import org.elasticsearch.cluster.ClusterName;
     import org.elasticsearch.cluster.ClusterState;
     import org.elasticsearch.common.UUIDs;
    @@ -189,8 +188,7 @@ private DesiredNode desiredNodeWithDifferentSpecsAndSameExternalId(DesiredNode d
                 desiredNode.settings(),
                 desiredNode.minProcessors().count() + randomIntBetween(1, 10),
                 ByteSizeValue.ofGb(desiredNode.memory().getGb() + randomIntBetween(15, 20)),
    -            ByteSizeValue.ofGb(desiredNode.storage().getGb() + randomIntBetween(1, 100)),
    -            Version.CURRENT
    +            ByteSizeValue.ofGb(desiredNode.storage().getGb() + randomIntBetween(1, 100))
             );
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
    index 264d8c5ca1a95..07ccf0e8f34e7 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
    @@ -155,6 +155,95 @@ public void testFindAliases() {
             }
         }
     
    +    public void testFindDataStreamAliases() {
    +        Metadata.Builder builder = Metadata.builder();
    +
    +        addDataStream("d1", builder);
    +        addDataStream("d2", builder);
    +        addDataStream("d3", builder);
    +        addDataStream("d4", builder);
    +
    +        builder.put("alias1", "d1", null, null);
    +        builder.put("alias2", "d2", null, null);
    +        builder.put("alias2-part2", "d2", null, null);
    +
    +        Metadata metadata = builder.build();
    +
    +        {
    +            GetAliasesRequest request = new GetAliasesRequest();
    +            Map<String, List<DataStreamAlias>> aliases = metadata.findDataStreamAliases(request.aliases(), Strings.EMPTY_ARRAY);
    +            assertThat(aliases, anEmptyMap());
    +        }
    +
    +        {
    +            GetAliasesRequest request = new GetAliasesRequest().aliases("alias1");
    +            Map<String, List<DataStreamAlias>> aliases = metadata.findDataStreamAliases(request.aliases(), new String[] { "index" });
    +            assertThat(aliases, anEmptyMap());
    +        }
    +
    +        {
    +            GetAliasesRequest request = new GetAliasesRequest().aliases("alias1");
    +            Map<String, List<DataStreamAlias>> aliases = metadata.findDataStreamAliases(
    +                request.aliases(),
    +                new String[] { "index", "d1", "d2" }
    +            );
    +            assertEquals(1, aliases.size());
    +            List<DataStreamAlias> found = aliases.get("d1");
    +            assertThat(found, transformedItemsMatch(DataStreamAlias::getAlias, contains("alias1")));
    +        }
    +
    +        {
    +            GetAliasesRequest request = new GetAliasesRequest().aliases("ali*");
    +            Map<String, List<DataStreamAlias>> aliases = metadata.findDataStreamAliases(request.aliases(), new String[] { "index", "d2" });
    +            assertEquals(1, aliases.size());
    +            List<DataStreamAlias> found = aliases.get("d2");
    +            assertThat(found, transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
    +        }
    +
    +        // test exclusion
    +        {
    +            GetAliasesRequest request = new GetAliasesRequest().aliases("*");
    +            Map<String, List<DataStreamAlias>> aliases = metadata.findDataStreamAliases(
    +                request.aliases(),
    +                new String[] { "index", "d1", "d2", "d3", "d4" }
    +            );
    +            assertThat(aliases.get("d2"), transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
    +            assertThat(aliases.get("d1"), transformedItemsMatch(DataStreamAlias::getAlias, contains("alias1")));
    +
    +            request.aliases("*", "-alias1");
    +            aliases = metadata.findDataStreamAliases(request.aliases(), new String[] { "index", "d1", "d2", "d3", "d4" });
    +            assertThat(aliases.get("d2"), transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
    +            assertNull(aliases.get("d1"));
    +        }
    +    }
    +
    +    public void testDataStreamAliasesByDataStream() {
    +        Metadata.Builder builder = Metadata.builder();
    +
    +        addDataStream("d1", builder);
    +        addDataStream("d2", builder);
    +        addDataStream("d3", builder);
    +        addDataStream("d4", builder);
    +
    +        builder.put("alias1", "d1", null, null);
    +        builder.put("alias2", "d2", null, null);
    +        builder.put("alias2-part2", "d2", null, null);
    +
    +        Metadata metadata = builder.build();
    +
    +        var aliases = metadata.dataStreamAliasesByDataStream();
    +
    +        assertTrue(aliases.containsKey("d1"));
    +        assertTrue(aliases.containsKey("d2"));
    +        assertFalse(aliases.containsKey("d3"));
    +        assertFalse(aliases.containsKey("d4"));
    +
    +        assertEquals(1, aliases.get("d1").size());
    +        assertEquals(2, aliases.get("d2").size());
    +
    +        assertThat(aliases.get("d2"), transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
    +    }
    +
         public void testFindAliasWithExclusion() {
             Metadata metadata = Metadata.builder()
                 .put(
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
    index dd92589f0af89..0dcb4eebb02d4 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
    @@ -32,6 +32,7 @@
     import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
     import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.shouldReserveSpaceForInitializingShard;
     import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING;
     import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE;
     import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SNAPSHOT_PARTIAL_SETTING;
    @@ -44,13 +45,10 @@ public class ExpectedShardSizeEstimatorTests extends ESAllocationTestCase {
         public void testShouldFallbackToDefaultExpectedShardSize() {
     
             var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
    -        var shard = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            randomIdentifier(),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE, RecoverySource.ExistingStoreRecoverySource.INSTANCE)
    -        );
    +        var shard = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), randomIdentifier(), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(
    +                randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE, RecoverySource.ExistingStoreRecoverySource.INSTANCE)
    +            ).build();
     
             var allocation = createRoutingAllocation(state, ClusterInfo.EMPTY, SnapshotShardSizeInfo.EMPTY);
     
    @@ -65,13 +63,9 @@ public void testShouldReadExpectedSizeFromClusterInfo() {
     
             var shardSize = randomLongBetween(100, 1000);
             var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
    -        var shard = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            randomIdentifier(),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        var shard = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), randomIdentifier(), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE)
    +            .build();
     
             var clusterInfo = createClusterInfo(shard, shardSize);
             var allocation = createRoutingAllocation(state, clusterInfo, SnapshotShardSizeInfo.EMPTY);
    @@ -119,13 +113,9 @@ public void testShouldReadExpectedSizeWhenInitializingFromSnapshot() {
             var snapshot = new Snapshot("repository", new SnapshotId("snapshot-1", "na"));
             var indexId = new IndexId("my-index", "_na_");
     
    -        var shard = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            randomIdentifier(),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId)
    -        );
    +        var shard = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), randomIdentifier(), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId))
    +            .build();
     
             var snapshotShardSizeInfo = new SnapshotShardSizeInfo(
                 Map.of(new InternalSnapshotsInfoService.SnapshotShard(snapshot, indexId, shard.shardId()), snapshotShardSize)
    @@ -147,13 +137,9 @@ public void testShouldReadSizeFromClonedShard() {
     
             var sourceShardSize = randomLongBetween(100, 1000);
             var source = newShardRouting(new ShardId("source", "_na_", 0), randomIdentifier(), true, ShardRoutingState.STARTED);
    -        var target = newShardRouting(
    -            new ShardId("target", "_na_", 0),
    -            randomIdentifier(),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.LocalShardsRecoverySource.INSTANCE
    -        );
    +        var target = shardRoutingBuilder(new ShardId("target", "_na_", 0), randomIdentifier(), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.LocalShardsRecoverySource.INSTANCE)
    +            .build();
     
             var state = ClusterState.builder(ClusterName.DEFAULT)
                 .metadata(
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
    index 166ffd80b1055..c5dbf31b03747 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
    @@ -170,7 +170,7 @@ private ShardRouting getShard(ShardId shardId, boolean isPrimary, ShardRoutingSt
                 state == ShardRoutingState.UNASSIGNED || state == ShardRoutingState.STARTED ? null : randomIdentifier(),
                 isPrimary,
                 state,
    -            TestShardRouting.buildRecoveryTarget(isPrimary, state),
    +            TestShardRouting.buildRecoverySource(isPrimary, state),
                 TestShardRouting.buildUnassignedInfo(state),
                 TestShardRouting.buildRelocationFailureInfo(state),
                 TestShardRouting.buildAllocationId(state),
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/RecoverySourceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/RecoverySourceTests.java
    index c326adb1faa0c..267ce09821608 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/RecoverySourceTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/RecoverySourceTests.java
    @@ -19,7 +19,7 @@
     public class RecoverySourceTests extends ESTestCase {
     
         public void testSerialization() throws IOException {
    -        RecoverySource recoverySource = TestShardRouting.randomRecoverySource();
    +        RecoverySource recoverySource = TestShardRouting.buildRecoverySource();
             BytesStreamOutput out = new BytesStreamOutput();
             recoverySource.writeTo(out);
             RecoverySource serializedRecoverySource = RecoverySource.readFrom(out.bytes().streamInput());
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
    index 56d3a3910cf5c..e6466b9237d3a 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
    @@ -25,6 +25,7 @@
     import java.util.Objects;
     
     import static java.util.Objects.requireNonNullElseGet;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.containsString;
     
     public class ShardRoutingTests extends AbstractWireSerializingTestCase<ShardRouting> {
    @@ -44,7 +45,7 @@ protected ShardRouting createTestInstance() {
                 state == ShardRoutingState.UNASSIGNED || state == ShardRoutingState.STARTED ? null : randomIdentifier(),
                 primary,
                 state,
    -            TestShardRouting.buildRecoveryTarget(primary, state),
    +            TestShardRouting.buildRecoverySource(primary, state),
                 TestShardRouting.buildUnassignedInfo(state),
                 TestShardRouting.buildRelocationFailureInfo(state),
                 TestShardRouting.buildAllocationId(state),
    @@ -99,7 +100,7 @@ private static ShardRouting mutateState(ShardRouting instance) {
                     ? null
                     : requireNonNullElseGet(
                         instance.recoverySource(),
    -                    () -> TestShardRouting.buildRecoveryTarget(instance.primary(), newState)
    +                    () -> TestShardRouting.buildRecoverySource(instance.primary(), newState)
                     ),
                 newState == ShardRoutingState.STARTED || newState == ShardRoutingState.RELOCATING
                     ? null
    @@ -358,49 +359,51 @@ public void testEqualsIgnoringVersion() {
                         break;
                     case 5:
                         // change primary flag
    -                    otherRouting = TestShardRouting.newShardRouting(
    +                    otherRouting = shardRoutingBuilder(
                             otherRouting.getIndexName(),
                             otherRouting.id(),
                             otherRouting.currentNodeId(),
    -                        otherRouting.relocatingNodeId(),
                             otherRouting.primary() == false,
    -                        otherRouting.state(),
    -                        otherRouting.unassignedInfo()
    -                    );
    +                        otherRouting.state()
    +                    ).withRelocatingNodeId(otherRouting.relocatingNodeId()).withUnassignedInfo(otherRouting.unassignedInfo()).build();
                         break;
                     case 6:
                         // change state
                         ShardRoutingState newState = randomValueOtherThan(otherRouting.state(), () -> randomFrom(ShardRoutingState.values()));
    -                    otherRouting = TestShardRouting.newShardRouting(
    +                    otherRouting = shardRoutingBuilder(
                             otherRouting.getIndexName(),
                             otherRouting.id(),
                             newState == ShardRoutingState.UNASSIGNED ? null : Objects.requireNonNullElse(otherRouting.currentNodeId(), "1"),
    -                        newState == ShardRoutingState.RELOCATING ? "2" : null,
                             otherRouting.primary(),
    -                        newState,
    -                        newState == ShardRoutingState.UNASSIGNED || newState == ShardRoutingState.INITIALIZING
    -                            ? Objects.requireNonNullElse(
    -                                otherRouting.unassignedInfo(),
    -                                new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test")
    -                            )
    -                            : null
    -                    );
    +                        newState
    +                    ).withRelocatingNodeId(newState == ShardRoutingState.RELOCATING ? "2" : null)
    +                        .withUnassignedInfo(
    +                            newState == ShardRoutingState.UNASSIGNED || newState == ShardRoutingState.INITIALIZING
    +                                ? Objects.requireNonNullElse(
    +                                    otherRouting.unassignedInfo(),
    +                                    new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test")
    +                                )
    +                                : null
    +                        )
    +                        .build();
                         break;
                 }
     
                 if (randomBoolean() && otherRouting.state() == ShardRoutingState.UNASSIGNED) {
                     // change unassigned info
    -                otherRouting = TestShardRouting.newShardRouting(
    +                otherRouting = shardRoutingBuilder(
                         otherRouting.getIndexName(),
                         otherRouting.id(),
                         otherRouting.currentNodeId(),
    -                    otherRouting.relocatingNodeId(),
                         otherRouting.primary(),
    -                    otherRouting.state(),
    -                    otherRouting.unassignedInfo() == null
    -                        ? new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test")
    -                        : new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, otherRouting.unassignedInfo().getMessage() + "_1")
    -                );
    +                    otherRouting.state()
    +                ).withRelocatingNodeId(otherRouting.relocatingNodeId())
    +                    .withUnassignedInfo(
    +                        otherRouting.unassignedInfo() == null
    +                            ? new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test")
    +                            : new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, otherRouting.unassignedInfo().getMessage() + "_1")
    +                    )
    +                    .build();
                 }
     
                 if (unchanged == false) {
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
    index b5e0f3a468bcd..dd50e2d4e8b9d 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
    @@ -51,6 +51,7 @@
     import static org.elasticsearch.cluster.routing.RoutingNodesHelper.shardsWithState;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.containsString;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.greaterThan;
    @@ -505,15 +506,9 @@ public void testReplicaAdded() {
          * The unassigned meta is kept when a shard goes to INITIALIZING, but cleared when it moves to STARTED.
          */
         public void testStateTransitionMetaHandling() {
    -        ShardRouting shard = TestShardRouting.newShardRouting(
    -            "test",
    -            1,
    -            null,
    -            null,
    -            true,
    -            ShardRoutingState.UNASSIGNED,
    +        ShardRouting shard = shardRoutingBuilder("test", 1, null, true, ShardRoutingState.UNASSIGNED).withUnassignedInfo(
                 new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)
    -        );
    +        ).build();
             assertThat(shard.unassignedInfo(), notNullValue());
             shard = shard.initialize("test_node", null, -1);
             assertThat(shard.state(), equalTo(ShardRoutingState.INITIALIZING));
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
    index 4640392d7b164..8e11c671b8ba4 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
    @@ -68,7 +68,7 @@
     import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
    -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.containsString;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.hasSize;
    @@ -705,8 +705,8 @@ public void testCanceledShardIsInitializedRespectingAllocationDeciders() {
                 .build();
             var shardId = new ShardId(indexMetadata.getIndex(), 0);
     
    -        ShardRouting primary = newShardRouting(shardId, "node-0", null, true, STARTED, allocationId1);
    -        ShardRouting replica = newShardRouting(shardId, "node-1", null, false, STARTED, allocationId2);
    +        ShardRouting primary = shardRoutingBuilder(shardId, "node-0", true, STARTED).withAllocationId(allocationId1).build();
    +        ShardRouting replica = shardRoutingBuilder(shardId, "node-1", false, STARTED).withAllocationId(allocationId2).build();
     
             ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
                 .nodes(
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java
    index 911769b0115dc..47173407a419e 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java
    @@ -21,7 +21,6 @@
     import org.elasticsearch.cluster.routing.RoutingNodes;
     import org.elasticsearch.cluster.routing.RoutingTable;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceShardsAllocator;
     import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
     import org.elasticsearch.common.UUIDs;
    @@ -35,6 +34,7 @@
     import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.not;
     import static org.hamcrest.Matchers.nullValue;
    @@ -498,14 +498,9 @@ private static void addIndex(
     
             for (int shardId = 0; shardId < numberOfShards; shardId++) {
                 indexRoutingTableBuilder.addShard(
    -                TestShardRouting.newShardRouting(
    -                    new ShardId(indexId, shardId),
    -                    assignmentFunction.apply(shardId),
    -                    null,
    -                    true,
    -                    ShardRoutingState.STARTED,
    -                    AllocationId.newInitializing(inSyncIds.get(shardId))
    -                )
    +                shardRoutingBuilder(new ShardId(indexId, shardId), assignmentFunction.apply(shardId), true, ShardRoutingState.STARTED)
    +                    .withAllocationId(AllocationId.newInitializing(inSyncIds.get(shardId)))
    +                    .build()
                 );
             }
     
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
    index c622a187d09ae..32a1e1d14876f 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
    @@ -32,7 +32,6 @@
     import org.elasticsearch.cluster.routing.ShardRouting;
     import org.elasticsearch.cluster.routing.ShardRoutingHelper;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
     import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
     import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
    @@ -69,6 +68,7 @@
     import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.notNullValue;
     import static org.hamcrest.Matchers.nullValue;
    @@ -352,26 +352,18 @@ public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNode
                     IndexRoutingTable.builder(shard1.getIndex())
                         .addIndexShard(
                             new IndexShardRoutingTable.Builder(shard1).addShard(
    -                            TestShardRouting.newShardRouting(
    -                                shard1.getIndexName(),
    -                                shard1.getId(),
    -                                newNode.getId(),
    -                                null,
    -                                true,
    -                                ShardRoutingState.STARTED,
    -                                allocationId1P
    -                            )
    +                            shardRoutingBuilder(shard1.getIndexName(), shard1.getId(), newNode.getId(), true, ShardRoutingState.STARTED)
    +                                .withAllocationId(allocationId1P)
    +                                .build()
                             )
                                 .addShard(
    -                                TestShardRouting.newShardRouting(
    +                                shardRoutingBuilder(
                                         shard1.getIndexName(),
                                         shard1.getId(),
                                         oldNode1.getId(),
    -                                    null,
                                         false,
    -                                    ShardRoutingState.STARTED,
    -                                    allocationId1R
    -                                )
    +                                    ShardRoutingState.STARTED
    +                                ).withAllocationId(allocationId1R).build()
                                 )
                         )
                 )
    @@ -379,26 +371,18 @@ public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNode
                     IndexRoutingTable.builder(shard2.getIndex())
                         .addIndexShard(
                             new IndexShardRoutingTable.Builder(shard2).addShard(
    -                            TestShardRouting.newShardRouting(
    -                                shard2.getIndexName(),
    -                                shard2.getId(),
    -                                newNode.getId(),
    -                                null,
    -                                true,
    -                                ShardRoutingState.STARTED,
    -                                allocationId2P
    -                            )
    +                            shardRoutingBuilder(shard2.getIndexName(), shard2.getId(), newNode.getId(), true, ShardRoutingState.STARTED)
    +                                .withAllocationId(allocationId2P)
    +                                .build()
                             )
                                 .addShard(
    -                                TestShardRouting.newShardRouting(
    +                                shardRoutingBuilder(
                                         shard2.getIndexName(),
                                         shard2.getId(),
                                         oldNode1.getId(),
    -                                    null,
                                         false,
    -                                    ShardRoutingState.STARTED,
    -                                    allocationId2R
    -                                )
    +                                    ShardRoutingState.STARTED
    +                                ).withAllocationId(allocationId2R).build()
                                 )
                         )
                 )
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java
    index cf9b141fef8d2..93ff6cbd11b6a 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java
    @@ -42,6 +42,7 @@
     import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.equalTo;
     
     public class ResizeAllocationDeciderTests extends ESAllocationTestCase {
    @@ -144,13 +145,9 @@ public void testShrink() { // we don't handle shrink yet
     
             ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider();
             RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState, null, null, 0);
    -        ShardRouting shardRouting = TestShardRouting.newShardRouting(
    -            new ShardId(idx, 0),
    -            null,
    -            true,
    -            ShardRoutingState.UNASSIGNED,
    +        ShardRouting shardRouting = shardRoutingBuilder(new ShardId(idx, 0), null, true, ShardRoutingState.UNASSIGNED).withRecoverySource(
                 RecoverySource.LocalShardsRecoverySource.INSTANCE
    -        );
    +        ).build();
             assertEquals(Decision.ALWAYS, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
             assertEquals(
                 Decision.ALWAYS,
    @@ -188,13 +185,9 @@ public void testSourceNotActive() {
             RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState, null, null, 0);
             int shardId = randomIntBetween(0, 3);
             int sourceShardId = IndexMetadata.selectSplitShard(shardId, clusterState.metadata().index("source"), 4).id();
    -        ShardRouting shardRouting = TestShardRouting.newShardRouting(
    -            new ShardId(idx, shardId),
    -            null,
    -            true,
    -            ShardRoutingState.UNASSIGNED,
    -            RecoverySource.LocalShardsRecoverySource.INSTANCE
    -        );
    +        ShardRouting shardRouting = shardRoutingBuilder(new ShardId(idx, shardId), null, true, ShardRoutingState.UNASSIGNED)
    +            .withRecoverySource(RecoverySource.LocalShardsRecoverySource.INSTANCE)
    +            .build();
             assertEquals(Decision.NO, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
             assertEquals(
                 Decision.NO,
    @@ -248,13 +241,9 @@ public void testSourcePrimaryActive() {
             RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState, null, null, 0);
             int shardId = randomIntBetween(0, 3);
             int sourceShardId = IndexMetadata.selectSplitShard(shardId, clusterState.metadata().index("source"), 4).id();
    -        ShardRouting shardRouting = TestShardRouting.newShardRouting(
    -            new ShardId(idx, shardId),
    -            null,
    -            true,
    -            ShardRoutingState.UNASSIGNED,
    -            RecoverySource.LocalShardsRecoverySource.INSTANCE
    -        );
    +        ShardRouting shardRouting = shardRoutingBuilder(new ShardId(idx, shardId), null, true, ShardRoutingState.UNASSIGNED)
    +            .withRecoverySource(RecoverySource.LocalShardsRecoverySource.INSTANCE)
    +            .build();
             assertEquals(Decision.YES, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
     
             String allowedNode = clusterState.getRoutingTable().index("source").shard(sourceShardId).primaryShard().currentNodeId();
    @@ -325,7 +314,10 @@ public void testGetForcedInitialShardAllocationToNodes() {
                     RoutingTable.builder()
                         .add(
                             IndexRoutingTable.builder(source.getIndex())
    -                            .addShard(TestShardRouting.newShardRouting(new ShardId(source.getIndex(), 0), "node-1", true, STARTED, null))
    +                            .addShard(
    +                                shardRoutingBuilder(new ShardId(source.getIndex(), 0), "node-1", true, STARTED).withRecoverySource(null)
    +                                    .build()
    +                            )
                         )
                 )
                 .build();
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java
    index a766adcfdd423..4af3cd09f1d4e 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java
    @@ -26,7 +26,6 @@
     import org.elasticsearch.cluster.routing.RoutingNodes;
     import org.elasticsearch.cluster.routing.RoutingTable;
     import org.elasticsearch.cluster.routing.ShardRouting;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
     import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
     import org.elasticsearch.common.UUIDs;
    @@ -44,6 +43,7 @@
     import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.empty;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.oneOf;
    @@ -434,8 +434,8 @@ private void runMoveShardRolesTest(ShardRouting.Role primaryRole, ShardRouting.R
             var shardId = new ShardId(indexMetadata.getIndex(), 0);
     
             var indexRoutingTable = IndexRoutingTable.builder(indexMetadata.getIndex())
    -            .addShard(TestShardRouting.newShardRouting(shardId, "node-1", null, true, STARTED, primaryRole))
    -            .addShard(TestShardRouting.newShardRouting(shardId, "node-2", null, false, STARTED, replicaRole))
    +            .addShard(shardRoutingBuilder(shardId, "node-1", true, STARTED).withRole(primaryRole).build())
    +            .addShard(shardRoutingBuilder(shardId, "node-2", false, STARTED).withRole(replicaRole).build())
                 .build();
     
             var node1 = newNode("node-1");
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java
    index a88d6228e4253..50e225860a9d1 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java
    @@ -29,6 +29,7 @@
     import java.util.Collections;
     import java.util.List;
     
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.not;
     import static org.hamcrest.Matchers.nullValue;
    @@ -56,14 +57,10 @@ public void testStartedShardsMatching() {
                 true,
                 ShardRoutingState.INITIALIZING
             );
    -        final ShardRouting relocatingShard = TestShardRouting.newShardRouting(
    -            new ShardId(index, 1),
    -            "node1",
    -            "node2",
    -            true,
    -            ShardRoutingState.RELOCATING,
    -            allocationId
    -        );
    +        final ShardRouting relocatingShard = shardRoutingBuilder(new ShardId(index, 1), "node1", true, ShardRoutingState.RELOCATING)
    +            .withRelocatingNodeId("node2")
    +            .withAllocationId(allocationId)
    +            .build();
             stateBuilder.routingTable(
                 RoutingTable.builder()
                     .add(
    @@ -124,22 +121,16 @@ public void testRelocatingPrimariesWithInitializingReplicas() {
                 .nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2")).add(newNode("node3")).add(newNode("node4")))
                 .metadata(Metadata.builder().put(indexMetadata, false));
     
    -        final ShardRouting relocatingPrimary = TestShardRouting.newShardRouting(
    -            new ShardId(index, 0),
    -            "node1",
    -            "node2",
    -            true,
    -            ShardRoutingState.RELOCATING,
    -            primaryId
    -        );
    -        final ShardRouting replica = TestShardRouting.newShardRouting(
    +        final ShardRouting relocatingPrimary = shardRoutingBuilder(new ShardId(index, 0), "node1", true, ShardRoutingState.RELOCATING)
    +            .withRelocatingNodeId("node2")
    +            .withAllocationId(primaryId)
    +            .build();
    +        final ShardRouting replica = shardRoutingBuilder(
                 new ShardId(index, 0),
                 "node3",
    -            relocatingReplica ? "node4" : null,
                 false,
    -            relocatingReplica ? ShardRoutingState.RELOCATING : ShardRoutingState.INITIALIZING,
    -            replicaId
    -        );
    +            relocatingReplica ? ShardRoutingState.RELOCATING : ShardRoutingState.INITIALIZING
    +        ).withRelocatingNodeId(relocatingReplica ? "node4" : null).withAllocationId(replicaId).build();
     
             stateBuilder.routingTable(
                 RoutingTable.builder()
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java
    index fa1a542fff7dd..e03183e43239f 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java
    @@ -27,7 +27,6 @@
     import org.elasticsearch.cluster.routing.RoutingTable;
     import org.elasticsearch.cluster.routing.ShardRouting;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.cluster.routing.allocation.AllocateUnassignedDecision;
     import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
     import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
    @@ -58,6 +57,7 @@
     import static java.util.stream.Collectors.summingLong;
     import static java.util.stream.Collectors.toSet;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.Balancer.getIndexDiskUsageInBytes;
     import static org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.DISK_USAGE_BALANCE_FACTOR_SETTING;
     import static org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider.SETTING_IGNORE_DISK_WATERMARKS;
    @@ -574,14 +574,9 @@ private void addIndex(
             for (var assignment : assignments.entrySet()) {
                 for (int i = 0; i < assignment.getValue(); i++) {
                     indexRoutingTableBuilder.addShard(
    -                    TestShardRouting.newShardRouting(
    -                        new ShardId(indexId, shardId),
    -                        assignment.getKey(),
    -                        null,
    -                        true,
    -                        ShardRoutingState.STARTED,
    -                        AllocationId.newInitializing(inSyncIds.get(shardId))
    -                    )
    +                    shardRoutingBuilder(new ShardId(indexId, shardId), assignment.getKey(), true, ShardRoutingState.STARTED)
    +                        .withAllocationId(AllocationId.newInitializing(inSyncIds.get(shardId)))
    +                        .build()
                     );
                     shardId++;
                 }
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java
    index d09a6525c9d76..cfaa90b643845 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java
    @@ -50,6 +50,7 @@
     import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
     import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider.SETTING_IGNORE_DISK_WATERMARKS;
     import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING;
     import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE;
    @@ -60,13 +61,9 @@ public class ClusterInfoSimulatorTests extends ESAllocationTestCase {
     
         public void testInitializeNewPrimary() {
     
    -        var newPrimary = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            "node-0",
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.EmptyStoreRecoverySource.INSTANCE
    -        );
    +        var newPrimary = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), "node-0", true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.EmptyStoreRecoverySource.INSTANCE)
    +            .build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode("node-0", new DiskUsageBuilder(1000, 1000))
    @@ -91,13 +88,9 @@ public void testInitializeNewPrimary() {
     
         public void testInitializePreviouslyExistingPrimary() {
     
    -        var existingPrimary = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            "node-0",
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.ExistingStoreRecoverySource.INSTANCE
    -        );
    +        var existingPrimary = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), "node-0", true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.ExistingStoreRecoverySource.INSTANCE)
    +            .build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode("node-0", new DiskUsageBuilder(1000, 900))
    @@ -125,13 +118,9 @@ public void testInitializePreviouslyExistingPrimary() {
         public void testInitializeNewReplica() {
     
             var existingPrimary = newShardRouting(new ShardId("my-index", "_na_", 0), "node-0", true, STARTED);
    -        var newReplica = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            "node-1",
    -            false,
    -            INITIALIZING,
    +        var newReplica = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), "node-1", false, INITIALIZING).withRecoverySource(
                 RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        ).build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode("node-0", new DiskUsageBuilder(1000, 900))
    @@ -167,13 +156,10 @@ public void testInitializeNewReplicaWithReservedSpace() {
     
             var indexMetadata = IndexMetadata.builder("my-index").settings(indexSettings(IndexVersion.current(), 1, 1)).build();
             var existingPrimary = newShardRouting(new ShardId(indexMetadata.getIndex(), 0), "node-0", true, STARTED);
    -        var newReplica = newShardRouting(
    -            new ShardId(indexMetadata.getIndex(), 0),
    -            "node-1",
    -            false,
    -            INITIALIZING,
    +        ShardId shardId = new ShardId(indexMetadata.getIndex(), 0);
    +        var newReplica = shardRoutingBuilder(shardId, "node-1", false, INITIALIZING).withRecoverySource(
                 RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        ).build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode("node-0", new DiskUsageBuilder("/data", 1000, 1000 - totalShardSize))
    @@ -211,14 +197,9 @@ public void testRelocateShard() {
             var fromNodeId = "node-0";
             var toNodeId = "node-1";
     
    -        var shard = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            toNodeId,
    -            fromNodeId,
    -            true,
    -            INITIALIZING,
    -            RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        var shard = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), toNodeId, true, INITIALIZING).withRelocatingNodeId(fromNodeId)
    +            .withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE)
    +            .build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode(fromNodeId, new DiskUsageBuilder(1000, 900))
    @@ -250,14 +231,9 @@ public void testRelocateShardWithMultipleDataPath() {
             var fromNodeId = "node-0";
             var toNodeId = "node-1";
     
    -        var shard = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            toNodeId,
    -            fromNodeId,
    -            true,
    -            INITIALIZING,
    -            RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        var shard = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), toNodeId, true, INITIALIZING).withRelocatingNodeId(fromNodeId)
    +            .withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE)
    +            .build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode(fromNodeId, new DiskUsageBuilder("/data-1", 1000, 500), new DiskUsageBuilder("/data-2", 1000, 750))
    @@ -298,13 +274,12 @@ public void testInitializeShardFromSnapshot() {
     
             var snapshot = new Snapshot("repository", new SnapshotId("snapshot-1", "na"));
             var indexId = new IndexId("my-index", "_na_");
    -        var shard = newShardRouting(
    +        var shard = shardRoutingBuilder(
                 new ShardId(state.metadata().index("my-index").getIndex(), 0),
                 "node-0",
                 true,
    -            ShardRoutingState.INITIALIZING,
    -            new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId)
    -        );
    +            ShardRoutingState.INITIALIZING
    +        ).withRecoverySource(new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId)).build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode("node-0", new DiskUsageBuilder(1000, 1000))
    @@ -344,13 +319,12 @@ public void testInitializeShardFromPartialSearchableSnapshot() {
     
             var snapshot = new Snapshot("repository", new SnapshotId("snapshot-1", "na"));
             var indexId = new IndexId("my-index", "_na_");
    -        var shard = newShardRouting(
    +        var shard = shardRoutingBuilder(
                 new ShardId(state.metadata().index("my-index").getIndex(), 0),
                 "node-0",
                 true,
    -            ShardRoutingState.INITIALIZING,
    -            new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId)
    -        );
    +            ShardRoutingState.INITIALIZING
    +        ).withRecoverySource(new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId)).build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode("node-0", new DiskUsageBuilder(1000, 1000))
    @@ -394,14 +368,9 @@ public void testRelocatePartialSearchableSnapshotShard() {
             var fromNodeId = "node-0";
             var toNodeId = "node-1";
     
    -        var shard = newShardRouting(
    -            new ShardId("my-index", "_na_", 0),
    -            toNodeId,
    -            fromNodeId,
    -            true,
    -            INITIALIZING,
    -            RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        var shard = shardRoutingBuilder(new ShardId("my-index", "_na_", 0), toNodeId, true, INITIALIZING).withRelocatingNodeId(fromNodeId)
    +            .withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE)
    +            .build();
     
             var initialClusterInfo = new ClusterInfoTestBuilder() //
                 .withNode(fromNodeId, new DiskUsageBuilder(1000, 1000))
    @@ -432,13 +401,9 @@ public void testInitializeShardFromClone() {
     
             var sourceShardSize = randomLongBetween(100, 1000);
             var source = newShardRouting(new ShardId("source", "_na_", 0), randomIdentifier(), true, ShardRoutingState.STARTED);
    -        var target = newShardRouting(
    -            new ShardId("target", "_na_", 0),
    -            randomIdentifier(),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.LocalShardsRecoverySource.INSTANCE
    -        );
    +        var target = shardRoutingBuilder(new ShardId("target", "_na_", 0), randomIdentifier(), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.LocalShardsRecoverySource.INSTANCE)
    +            .build();
     
             var state = ClusterState.builder(ClusterName.DEFAULT)
                 .metadata(
    @@ -486,14 +451,9 @@ public void testDiskUsageSimulationWithSingleDataPathAndDiskThresholdDecider() {
             var shard1 = newShardRouting("index-1", 0, "node-0", null, true, STARTED);
             addIndex(metadataBuilder, routingTableBuilder, shard1);
     
    -        var shard2 = newShardRouting(
    -            new ShardId("index-2", "_na_", 0),
    -            "node-0",
    -            "node-1",
    -            true,
    -            INITIALIZING,
    -            RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        var shard2 = shardRoutingBuilder(new ShardId("index-2", "_na_", 0), "node-0", true, INITIALIZING).withRelocatingNodeId("node-1")
    +            .withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE)
    +            .build();
             addIndex(metadataBuilder, routingTableBuilder, shard2);
     
             var shard3 = newShardRouting("index-3", 0, "node-1", null, true, STARTED);
    @@ -560,14 +520,9 @@ public void testDiskUsageSimulationWithMultipleDataPathAndDiskThresholdDecider()
             var shard1 = newShardRouting("index-1", 0, "node-0", null, true, STARTED);
             addIndex(metadataBuilder, routingTableBuilder, shard1);
     
    -        var shard2 = newShardRouting(
    -            new ShardId("index-2", "_na_", 0),
    -            "node-0",
    -            "node-1",
    -            true,
    -            INITIALIZING,
    -            RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +        var shard2 = shardRoutingBuilder(new ShardId("index-2", "_na_", 0), "node-0", true, INITIALIZING).withRelocatingNodeId("node-1")
    +            .withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE)
    +            .build();
             addIndex(metadataBuilder, routingTableBuilder, shard2);
     
             var shard3 = newShardRouting("index-3", 0, "node-1", null, true, STARTED);
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java
    index 9fe168074f41e..4fb1093698430 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java
    @@ -65,7 +65,6 @@
     import java.util.concurrent.atomic.AtomicBoolean;
     import java.util.concurrent.atomic.AtomicInteger;
     import java.util.concurrent.atomic.AtomicLong;
    -import java.util.function.Function;
     
     import static java.util.stream.Collectors.toMap;
     import static org.elasticsearch.cluster.ClusterInfo.shardIdentifierFromRouting;
    @@ -74,6 +73,7 @@
     import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
     import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
     import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.common.settings.ClusterSettings.createBuiltInClusterSettings;
     import static org.elasticsearch.test.MockLogAppender.assertThatLogger;
     import static org.hamcrest.Matchers.aMapWithSize;
    @@ -638,18 +638,39 @@ public void testDesiredBalanceShouldConvergeInABigCluster() {
                     if (primaryNodeId != null) {
                         dataPath.put(new NodeAndShard(primaryNodeId, shardId), "/data");
                         usedDiskSpace.compute(primaryNodeId, (k, v) -> v + thisShardSize);
    +                    indexRoutingTableBuilder.addShard(
    +                        shardRoutingBuilder(shardId, primaryNodeId, true, STARTED).withAllocationId(
    +                            AllocationId.newInitializing(inSyncIds.get(shard * (replicas + 1)))
    +                        ).build()
    +                    );
    +                } else {
    +                    var lastAllocatedNodeId = randomFrom(remainingNodeIds);
    +                    assertThat(lastAllocatedNodeId, notNullValue());// the only null was picked as primaryNodeId
    +                    dataPath.put(new NodeAndShard(lastAllocatedNodeId, shardId), "/data");
    +                    usedDiskSpace.compute(lastAllocatedNodeId, (k, v) -> v + thisShardSize);
    +                    indexRoutingTableBuilder.addShard(
    +                        shardRoutingBuilder(shardId, null, true, UNASSIGNED).withRecoverySource(
    +                            RecoverySource.ExistingStoreRecoverySource.INSTANCE
    +                        )
    +                            .withUnassignedInfo(
    +                                new UnassignedInfo(
    +                                    UnassignedInfo.Reason.NODE_LEFT,
    +                                    null,
    +                                    null,
    +                                    0,
    +                                    0,
    +                                    0,
    +                                    false,
    +                                    UnassignedInfo.AllocationStatus.NO_ATTEMPT,
    +                                    Set.of(),
    +                                    lastAllocatedNodeId
    +                                )
    +                            )
    +                            .withAllocationId(AllocationId.newInitializing(inSyncIds.get(shard * (replicas + 1))))
    +                            .build()
    +                    );
                     }
     
    -                indexRoutingTableBuilder.addShard(
    -                    newShardRouting(
    -                        shardId,
    -                        primaryNodeId,
    -                        null,
    -                        true,
    -                        primaryNodeId == null ? UNASSIGNED : STARTED,
    -                        AllocationId.newInitializing(inSyncIds.get(shard * (replicas + 1)))
    -                    )
    -                );
                     for (int replica = 0; replica < replicas; replica++) {
                         var replicaNodeId = primaryNodeId == null ? null : pickAndRemoveRandomValueFrom(remainingNodeIds);
                         shardSizes.put(shardIdentifierFromRouting(shardId, false), thisShardSize);
    @@ -660,14 +681,9 @@ public void testDesiredBalanceShouldConvergeInABigCluster() {
                         }
     
                         indexRoutingTableBuilder.addShard(
    -                        newShardRouting(
    -                            shardId,
    -                            replicaNodeId,
    -                            null,
    -                            false,
    -                            replicaNodeId == null ? UNASSIGNED : STARTED,
    +                        shardRoutingBuilder(shardId, replicaNodeId, false, replicaNodeId == null ? UNASSIGNED : STARTED).withAllocationId(
                                 AllocationId.newInitializing(inSyncIds.get(shard * (replicas + 1) + 1 + replica))
    -                        )
    +                        ).build()
                         );
                     }
     
    @@ -903,7 +919,11 @@ public void testAccountForSizeOfMisplacedShardsDuringNewComputation() {
                     ShardId index2ShardId = shardIdFrom(indexMetadata2, 0);
                     routingTableBuilder.add(
                         IndexRoutingTable.builder(indexMetadata2.getIndex())
    -                        .addShard(newShardRouting(index2ShardId, "node-1", true, INITIALIZING, index2SnapshotRecoverySource))
    +                        .addShard(
    +                            shardRoutingBuilder(index2ShardId, "node-1", true, INITIALIZING).withRecoverySource(
    +                                index2SnapshotRecoverySource
    +                            ).build()
    +                        )
                     );
                     if (randomBoolean()) {
                         // Shard is 75% downloaded
    @@ -917,7 +937,11 @@ public void testAccountForSizeOfMisplacedShardsDuringNewComputation() {
                     ShardId index2ShardId = shardIdFrom(indexMetadata2, 0);
                     routingTableBuilder.add(
                         IndexRoutingTable.builder(indexMetadata2.getIndex())
    -                        .addShard(newShardRouting(index2ShardId, "node-2", true, INITIALIZING, index2SnapshotRecoverySource))
    +                        .addShard(
    +                            shardRoutingBuilder(index2ShardId, "node-2", true, INITIALIZING).withRecoverySource(
    +                                index2SnapshotRecoverySource
    +                            ).build()
    +                        )
                     );
                     if (randomBoolean()) {
                         // Shard is 75% downloaded
    @@ -1154,12 +1178,6 @@ public void testAccountForSizeOfAllInitializingShardsDuringAllocation() {
             );
         }
     
    -    @Deprecated
    -    private static ClusterInfo createClusterInfo(List diskUsages, Map shardSizes) {
    -        var diskUsage = diskUsages.stream().collect(toMap(DiskUsage::getNodeId, Function.identity()));
    -        return new ClusterInfo(diskUsage, diskUsage, shardSizes, Map.of(), Map.of(), Map.of());
    -    }
    -
         private static class ClusterInfoTestBuilder {
     
             private final Map diskUsage = new HashMap<>();
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java
    index 3c58eb8c57573..5a7188fd4b5ca 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java
    @@ -65,7 +65,7 @@
     import java.util.function.Predicate;
     
     import static org.elasticsearch.cluster.routing.AllocationId.newInitializing;
    -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
     import static org.elasticsearch.common.settings.ClusterSettings.createBuiltInClusterSettings;
     import static org.hamcrest.Matchers.equalTo;
    @@ -248,8 +248,14 @@ public void testShouldNotRemoveAllocationDelayMarkersOnReconcile() {
                 .build();
             var shardId = new ShardId(index.getIndex(), 0);
             var indexRoutingTable = IndexRoutingTable.builder(index.getIndex())
    -            .addShard(newShardRouting(shardId, LOCAL_NODE_ID, null, true, ShardRoutingState.STARTED, newInitializing(inSyncAllocationId)))
    -            .addShard(newShardRouting(shardId, null, null, false, ShardRoutingState.UNASSIGNED, delayedUnasssignedInfo))
    +            .addShard(
    +                shardRoutingBuilder(shardId, LOCAL_NODE_ID, true, ShardRoutingState.STARTED).withAllocationId(
    +                    newInitializing(inSyncAllocationId)
    +                ).build()
    +            )
    +            .addShard(
    +                shardRoutingBuilder(shardId, null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo(delayedUnasssignedInfo).build()
    +            )
                 .build();
     
             var initialState = ClusterState.builder(new ClusterName(ClusterServiceUtils.class.getSimpleName()))
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
    index 2b182b29c9971..716e7c80a6cde 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
    @@ -595,8 +595,8 @@ public void testAverageUsage() {
             usages.put("node3", new DiskUsage("node3", "n3", "/dev/null", 100, 0));  // 100% used
     
             DiskUsage node1Usage = DiskThresholdDecider.averageUsage(rn, usages);
    -        assertThat(node1Usage.getTotalBytes(), equalTo(100L));
    -        assertThat(node1Usage.getFreeBytes(), equalTo(25L));
    +        assertThat(node1Usage.totalBytes(), equalTo(100L));
    +        assertThat(node1Usage.freeBytes(), equalTo(25L));
         }
     
         private void doTestShardRelocationsTakenIntoAccount(boolean testMaxHeadroom) {
    @@ -1231,7 +1231,8 @@ private void doTestDiskThresholdWithSnapshotShardSizes(boolean testMaxHeadroom)
                 logger.info("--> simulating snapshot shards size retrieval success");
                 snapshotShardSizes.put(snapshotShard, shardSizeInBytes);
                 logger.info("--> shard allocation depends on its size");
    -            shouldAllocate = shardSizeInBytes < usages.get("node1").getFreeBytes();
    +            DiskUsage usage = usages.get("node1");
    +            shouldAllocate = shardSizeInBytes < usage.freeBytes();
             } else {
                 logger.info("--> simulating snapshot shards size retrieval failure");
                 snapshotShardSizes.put(snapshotShard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE);
    diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
    index 74d8bc62ff203..9602dc165b644 100644
    --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
    +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
    @@ -51,7 +51,7 @@
     import static java.util.Collections.emptySet;
     import static org.elasticsearch.cluster.ClusterInfo.shardIdentifierFromRouting;
     import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
    -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING;
     import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE;
     import static org.hamcrest.Matchers.containsString;
    @@ -527,13 +527,12 @@ public void testTakesIntoAccountExpectedSizeForInitializingSearchableSnapshots()
             for (int i = 0; i < searchableSnapshotIndex.getNumberOfShards(); i++) {
                 long expectedSize = randomLongBetween(10, 50);
                 // a searchable snapshot shard without corresponding entry in cluster info
    -            ShardRouting startedShardWithExpectedSize = newShardRouting(
    +            ShardRouting startedShardWithExpectedSize = shardRoutingBuilder(
                     new ShardId(searchableSnapshotIndex.getIndex(), i),
                     nodeId,
                     true,
    -                ShardRoutingState.STARTED,
    -                expectedSize
    -            );
    +                ShardRoutingState.STARTED
    +            ).withExpectedShardSize(expectedSize).build();
                 searchableSnapshotIndexRoutingTableBuilder.addShard(startedShardWithExpectedSize);
                 unaccountedSearchableSnapshotSizes += expectedSize;
             }
    @@ -541,13 +540,12 @@ public void testTakesIntoAccountExpectedSizeForInitializingSearchableSnapshots()
             for (int i = 0; i < searchableSnapshotIndex.getNumberOfShards(); i++) {
                 var shardSize = randomLongBetween(10, 50);
                 // a shard relocating to this node
    -            ShardRouting initializingShard = newShardRouting(
    +            ShardRouting initializingShard = shardRoutingBuilder(
                     new ShardId(regularIndex.getIndex(), i),
                     nodeId,
                     true,
    -                ShardRoutingState.INITIALIZING,
    -                PeerRecoverySource.INSTANCE
    -            );
    +                ShardRoutingState.INITIALIZING
    +            ).withRecoverySource(PeerRecoverySource.INSTANCE).build();
                 regularIndexRoutingTableBuilder.addShard(initializingShard);
                 knownShardSizes.put(shardIdentifierFromRouting(initializingShard), shardSize);
                 relocatingShardsSizes += shardSize;
    diff --git a/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java b/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java
    index 0b8085fc6d9a7..fb61ece0b469b 100644
    --- a/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java
    +++ b/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java
    @@ -11,29 +11,16 @@
     import org.apache.logging.log4j.Level;
     import org.apache.logging.log4j.LogManager;
     import org.apache.logging.log4j.Logger;
    -import org.apache.logging.log4j.core.LogEvent;
    -import org.apache.logging.log4j.core.appender.AbstractAppender;
    -import org.apache.logging.log4j.core.config.Property;
     import org.elasticsearch.common.ReferenceDocs;
     import org.elasticsearch.common.bytes.BytesArray;
    -import org.elasticsearch.common.bytes.BytesReference;
    -import org.elasticsearch.common.io.stream.BytesStreamOutput;
    -import org.elasticsearch.core.CheckedRunnable;
    -import org.elasticsearch.core.Streams;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.junit.annotations.TestLogging;
     
    -import java.io.ByteArrayInputStream;
     import java.io.IOException;
     import java.io.OutputStream;
     import java.nio.charset.StandardCharsets;
     import java.util.Arrays;
    -import java.util.Base64;
     import java.util.stream.IntStream;
    -import java.util.zip.GZIPInputStream;
    -
    -import static org.hamcrest.Matchers.greaterThan;
    -import static org.hamcrest.Matchers.lessThanOrEqualTo;
     
     public class ChunkedLoggingStreamTests extends ESTestCase {
     
    @@ -56,7 +43,7 @@ private static void runChunkingTest(int size) {
             final var prefix = randomAlphaOfLength(10);
             final var level = randomFrom(Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR);
             final var referenceDocs = randomFrom(ReferenceDocs.values());
    -        assertEquals(expectedBody, getLoggedBody(logger, level, prefix, referenceDocs, () -> {
    +        assertEquals(expectedBody, ChunkedLoggingStreamTestUtils.getLoggedBody(logger, level, prefix, referenceDocs, () -> {
                 try (var stream = new ChunkedLoggingStream(logger, level, prefix, referenceDocs)) {
                     writeRandomly(stream, bytes);
                 }
    @@ -68,117 +55,14 @@ public void testEncodingRoundTrip() {
             final var bytes = randomByteArrayOfLength(between(0, 10000));
             final var level = randomFrom(Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR);
             final var referenceDocs = randomFrom(ReferenceDocs.values());
    -        assertEquals(new BytesArray(bytes), getDecodedLoggedBody(logger, level, "prefix", referenceDocs, () -> {
    -            try (var stream = ChunkedLoggingStream.create(logger, level, "prefix", referenceDocs)) {
    -                writeRandomly(stream, bytes);
    -            }
    -        }));
    -    }
    -
    -    private static String getLoggedBody(
    -        Logger captureLogger,
    -        final Level level,
    -        String prefix,
    -        final ReferenceDocs referenceDocs,
    -        CheckedRunnable runnable
    -    ) {
    -        class ChunkReadingAppender extends AbstractAppender {
    -            final StringBuilder encodedResponseBuilder = new StringBuilder();
    -            int chunks;
    -            boolean seenTotal;
    -
    -            ChunkReadingAppender() {
    -                super("mock", null, null, false, Property.EMPTY_ARRAY);
    -            }
    -
    -            @Override
    -            public void append(LogEvent event) {
    -                if (event.getLevel() != level) {
    -                    return;
    -                }
    -                if (event.getLoggerName().equals(captureLogger.getName()) == false) {
    -                    return;
    -                }
    -                assertFalse(seenTotal);
    -                final var message = event.getMessage().getFormattedMessage();
    -                final var onePartPrefix = prefix + " (gzip compressed and base64-encoded; for details see " + referenceDocs + "): ";
    -                final var partPrefix = prefix + " [part " + (chunks + 1) + "]: ";
    -                if (message.startsWith(partPrefix)) {
    -                    chunks += 1;
    -                    final var chunk = message.substring(partPrefix.length());
    -                    assertThat(chunk.length(), lessThanOrEqualTo(ChunkedLoggingStream.CHUNK_SIZE));
    -                    encodedResponseBuilder.append(chunk);
    -                } else if (message.startsWith(onePartPrefix)) {
    -                    assertEquals(0, chunks);
    -                    chunks += 1;
    -                    final var chunk = message.substring(onePartPrefix.length());
    -                    assertThat(chunk.length(), lessThanOrEqualTo(ChunkedLoggingStream.CHUNK_SIZE));
    -                    encodedResponseBuilder.append(chunk);
    -                    seenTotal = true;
    -                } else {
    -                    assertEquals(
    -                        prefix
    -                            + " (gzip compressed, base64-encoded, and split into "
    -                            + chunks
    -                            + " parts on preceding log lines; for details see "
    -                            + referenceDocs
    -                            + ")",
    -                        message
    -                    );
    -                    assertThat(chunks, greaterThan(1));
    -                    seenTotal = true;
    +        assertEquals(
    +            new BytesArray(bytes),
    +            ChunkedLoggingStreamTestUtils.getDecodedLoggedBody(logger, level, "prefix", referenceDocs, () -> {
    +                try (var stream = ChunkedLoggingStream.create(logger, level, "prefix", referenceDocs)) {
    +                    writeRandomly(stream, bytes);
                     }
    -            }
    -        }
    -
    -        final var appender = new ChunkReadingAppender();
    -        try {
    -            appender.start();
    -            Loggers.addAppender(captureLogger, appender);
    -            runnable.run();
    -        } catch (Exception e) {
    -            fail(e);
    -        } finally {
    -            Loggers.removeAppender(captureLogger, appender);
    -            appender.stop();
    -        }
    -
    -        assertThat(appender.chunks, greaterThan(0));
    -        assertTrue(appender.seenTotal);
    -
    -        return appender.encodedResponseBuilder.toString();
    -    }
    -
    -    /**
    -     * Test utility function which captures the logged output from a {@link ChunkedLoggingStream}, combines the chunks, Base64-decodes it
    -     * and Gzip-decompresses it to retrieve the original data.
    -     *
    -     * @param captureLogger The logger whose output should be captured.
    -     * @param level         The log level for the data.
    -     * @param prefix        The prefix used by the logging stream.
    -     * @param referenceDocs A link to the reference docs about the output.
    -     * @param runnable      The action which emits the logs.
    -     * @return              A {@link BytesReference} containing the captured data.
    -     */
    -    public static BytesReference getDecodedLoggedBody(
    -        Logger captureLogger,
    -        Level level,
    -        String prefix,
    -        ReferenceDocs referenceDocs,
    -        CheckedRunnable runnable
    -    ) {
    -        final var loggedBody = getLoggedBody(captureLogger, level, prefix, referenceDocs, runnable);
    -
    -        try (
    -            var bytesStreamOutput = new BytesStreamOutput();
    -            var byteArrayInputStream = new ByteArrayInputStream(Base64.getDecoder().decode(loggedBody));
    -            var gzipInputStream = new GZIPInputStream(byteArrayInputStream)
    -        ) {
    -            Streams.copy(gzipInputStream, bytesStreamOutput);
    -            return bytesStreamOutput.bytes();
    -        } catch (Exception e) {
    -            return fail(e);
    -        }
    +            })
    +        );
         }
     
         private static void writeRandomly(OutputStream stream, byte[] bytes) throws IOException {
    diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
    index 1b52a474cbac9..e9acdb973f4bf 100644
    --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
    +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
    @@ -547,34 +547,52 @@ public void testAddConsumerAffix() {
                 "list",
                 (k) -> Setting.listSetting(k, Arrays.asList("1"), Integer::parseInt, Property.Dynamic, Property.NodeScope)
             );
    -        AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(intSetting, listSetting)));
    +        Setting.AffixSetting fallbackSetting = Setting.prefixKeySetting(
    +            "baz.",
    +            "bar.",
    +            (ns, k) -> Setting.boolSetting(k, false, Property.Dynamic, Property.NodeScope)
    +        );
    +        AbstractScopedSettings service = new ClusterSettings(
    +            Settings.EMPTY,
    +            new HashSet<>(Arrays.asList(intSetting, listSetting, fallbackSetting))
    +        );
             Map> listResults = new HashMap<>();
             Map intResults = new HashMap<>();
    +        Map fallbackResults = new HashMap<>();
     
             BiConsumer intConsumer = intResults::put;
             BiConsumer> listConsumer = listResults::put;
    +        BiConsumer fallbackConsumer = fallbackResults::put;
     
             service.addAffixUpdateConsumer(listSetting, listConsumer, (s, k) -> {});
             service.addAffixUpdateConsumer(intSetting, intConsumer, (s, k) -> {});
    +        service.addAffixUpdateConsumer(fallbackSetting, fallbackConsumer, (s, k) -> {});
             assertEquals(0, listResults.size());
             assertEquals(0, intResults.size());
    +        assertEquals(0, fallbackResults.size());
             service.applySettings(
                 Settings.builder()
                     .put("foo.test.bar", 2)
                     .put("foo.test_1.bar", 7)
                     .putList("foo.test_list.list", "16", "17")
                     .putList("foo.test_list_1.list", "18", "19", "20")
    +                .put("bar.abc", true)
    +                .put("baz.def", true)
                     .build()
             );
             assertEquals(2, intResults.get("test").intValue());
             assertEquals(7, intResults.get("test_1").intValue());
             assertEquals(Arrays.asList(16, 17), listResults.get("test_list"));
             assertEquals(Arrays.asList(18, 19, 20), listResults.get("test_list_1"));
    +        assertEquals(true, fallbackResults.get("abc"));
    +        assertEquals(true, fallbackResults.get("def"));
             assertEquals(2, listResults.size());
             assertEquals(2, intResults.size());
    +        assertEquals(2, fallbackResults.size());
     
             listResults.clear();
             intResults.clear();
    +        fallbackResults.clear();
     
             service.applySettings(
                 Settings.builder()
    @@ -582,12 +600,16 @@ public void testAddConsumerAffix() {
                     .put("foo.test_1.bar", 8)
                     .putList("foo.test_list.list", "16", "17")
                     .putNull("foo.test_list_1.list")
    +                .put("bar.abc", true)
    +                .put("baz.xyz", true)
                     .build()
             );
             assertNull("test wasn't changed", intResults.get("test"));
             assertEquals(8, intResults.get("test_1").intValue());
             assertNull("test_list wasn't changed", listResults.get("test_list"));
             assertEquals(Arrays.asList(1), listResults.get("test_list_1")); // reset to default
    +        assertNull("abc wasn't changed", fallbackResults.get("abc"));
    +        assertEquals(true, fallbackResults.get("xyz"));
             assertEquals(1, listResults.size());
             assertEquals(1, intResults.size());
         }
    diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java
    index e5863988a76e5..1b3d741a6ea44 100644
    --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java
    +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java
    @@ -268,6 +268,18 @@ public void testValidator() {
             assertTrue(FooBarValidator.invokedWithDependencies);
         }
     
    +    public void testDuplicateSettingsPrefersPrimary() {
    +        Setting fooBar = new Setting<>("foo.bar", new Setting<>("baz.qux", "", Function.identity()), Function.identity());
    +        assertThat(
    +            fooBar.get(Settings.builder().put("foo.bar", "primaryUsed").put("baz.qux", "fallbackUsed").build()),
    +            equalTo("primaryUsed")
    +        );
    +        assertThat(
    +            fooBar.get(Settings.builder().put("baz.qux", "fallbackUsed").put("foo.bar", "primaryUsed").build()),
    +            equalTo("primaryUsed")
    +        );
    +    }
    +
         public void testValidatorForFilteredStringSetting() {
             final Setting filteredStringSetting = new Setting<>("foo.bar", "foobar", Function.identity(), value -> {
                 throw new SettingsException("validate always fails");
    @@ -802,6 +814,30 @@ public void testDynamicKeySetting() {
             }
         }
     
    +    public void testPrefixKeySettingFallbackAsMap() {
    +        Setting.AffixSetting setting = Setting.prefixKeySetting(
    +            "foo.",
    +            "bar.",
    +            (ns, key) -> Setting.boolSetting(key, false, Property.NodeScope)
    +        );
    +
    +        assertTrue(setting.match("foo.bar"));
    +        assertTrue(setting.match("bar.bar"));
    +
    +        Map map = setting.getAsMap(Settings.builder().put("foo.bar", "true").build());
    +        assertEquals(1, map.size());
    +        assertTrue(map.get("bar"));
    +
    +        map = setting.getAsMap(Settings.builder().put("bar.bar", "true").build());
    +        assertEquals(1, map.size());
    +        assertTrue(map.get("bar"));
    +
    +        // Prefer primary
    +        map = setting.getAsMap(Settings.builder().put("foo.bar", "false").put("bar.bar", "true").build());
    +        assertEquals(1, map.size());
    +        assertFalse(map.get("bar"));
    +    }
    +
         public void testAffixKeySetting() {
             Setting setting = Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope));
             assertTrue(setting.hasComplexMatcher());
    @@ -824,6 +860,12 @@ public void testAffixKeySetting() {
             );
             assertEquals("prefix must end with a '.'", exc.getMessage());
     
    +        exc = expectThrows(
    +            IllegalArgumentException.class,
    +            () -> Setting.prefixKeySetting("foo.", "bar", (ns, key) -> Setting.boolSetting(key, false, Property.NodeScope))
    +        );
    +        assertEquals("prefix must end with a '.'", exc.getMessage());
    +
             Setting> listAffixSetting = Setting.affixKeySetting(
                 "foo.",
                 "bar",
    diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
    index fb9bde31e8fc4..7b4357262edd3 100644
    --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
    +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
    @@ -8,15 +8,20 @@
     
     package org.elasticsearch.common.util.concurrent;
     
    +import org.elasticsearch.action.ActionListener;
    +import org.elasticsearch.action.ActionRunnable;
     import org.elasticsearch.common.settings.Setting;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.common.unit.Processors;
     import org.elasticsearch.test.ESTestCase;
    +import org.elasticsearch.threadpool.ThreadPool;
     import org.hamcrest.Matcher;
     
     import java.util.Locale;
     import java.util.concurrent.CountDownLatch;
     import java.util.concurrent.CyclicBarrier;
    +import java.util.concurrent.Executor;
    +import java.util.concurrent.ExecutorService;
     import java.util.concurrent.ThreadPoolExecutor;
     import java.util.concurrent.TimeUnit;
     import java.util.concurrent.atomic.AtomicBoolean;
    @@ -501,4 +506,166 @@ public void testNodeProcessorsFloatValidation() {
             }
         }
     
    +    // This test must complete to ensure that our basic infrastructure is working as expected.
    +    // Specifically that ExecutorScalingQueue, which subclasses LinkedTransferQueue, correctly
    +    // tracks tasks submitted to the executor.
    +    public void testBasicTaskExecution() {
    +        final var executorService = EsExecutors.newScaling(
    +            "test",
    +            0,
    +            between(1, 5),
    +            60,
    +            TimeUnit.SECONDS,
    +            randomBoolean(),
    +            EsExecutors.daemonThreadFactory("test"),
    +            new ThreadContext(Settings.EMPTY)
    +        );
    +        try {
    +            final var countDownLatch = new CountDownLatch(between(1, 10));
    +            class TestTask extends AbstractRunnable {
    +                @Override
    +                protected void doRun() {
    +                    countDownLatch.countDown();
    +                    if (countDownLatch.getCount() > 0) {
    +                        executorService.execute(TestTask.this);
    +                    }
    +                }
    +
    +                @Override
    +                public void onFailure(Exception e) {
    +                    fail(e);
    +                }
    +            }
    +
    +            executorService.execute(new TestTask());
    +            safeAwait(countDownLatch);
    +        } finally {
    +            ThreadPool.terminate(executorService, 10, TimeUnit.SECONDS);
    +        }
    +    }
    +
    +    public void testScalingDropOnShutdown() {
    +        final var executor = EsExecutors.newScaling(
    +            getName(),
    +            0,
    +            between(1, 5),
    +            60,
    +            TimeUnit.SECONDS,
    +            false,
    +            EsExecutors.daemonThreadFactory(getName()),
    +            new ThreadContext(Settings.EMPTY)
    +        );
    +        ThreadPool.terminate(executor, 10, TimeUnit.SECONDS);
    +        executor.execute(() -> fail("should not run")); // no-op
    +        executor.execute(new AbstractRunnable() {
    +            @Override
    +            public void onFailure(Exception e) {
    +                fail("should not call onFailure");
    +            }
    +
    +            @Override
    +            protected void doRun() {
    +                fail("should not call doRun");
    +            }
    +
    +            @Override
    +            public boolean isForceExecution() {
    +                return randomBoolean();
    +            }
    +
    +            @Override
    +            public void onRejection(Exception e) {
    +                fail("should not call onRejection");
    +            }
    +
    +            @Override
    +            public void onAfter() {
    +                fail("should not call onAfter");
    +            }
    +        });
    +    }
    +
    +    public void testScalingRejectOnShutdown() {
    +        runRejectOnShutdownTest(
    +            EsExecutors.newScaling(
    +                getName(),
    +                0,
    +                between(1, 5),
    +                60,
    +                TimeUnit.SECONDS,
    +                true,
    +                EsExecutors.daemonThreadFactory(getName()),
    +                new ThreadContext(Settings.EMPTY)
    +            )
    +        );
    +    }
    +
    +    public void testFixedBoundedRejectOnShutdown() {
    +        runRejectOnShutdownTest(
    +            EsExecutors.newFixed(
    +                getName(),
    +                between(1, 5),
    +                between(1, 5),
    +                EsExecutors.daemonThreadFactory(getName()),
    +                threadContext,
    +                randomFrom(DEFAULT, DO_NOT_TRACK)
    +            )
    +        );
    +    }
    +
    +    public void testFixedUnboundedRejectOnShutdown() {
    +        runRejectOnShutdownTest(
    +            EsExecutors.newFixed(
    +                getName(),
    +                between(1, 5),
    +                -1,
    +                EsExecutors.daemonThreadFactory(getName()),
    +                threadContext,
    +                randomFrom(DEFAULT, DO_NOT_TRACK)
    +            )
    +        );
    +    }
    +
    +    private static void runRejectOnShutdownTest(ExecutorService executor) {
    +        for (int i = between(0, 10); i > 0; i--) {
    +            final var delayMillis = between(0, 100);
    +            executor.execute(ActionRunnable.wrap(ActionListener.noop(), l -> safeSleep(delayMillis)));
    +        }
    +        try {
    +            executor.shutdown();
    +            assertShutdownAndRejectingTasks(executor);
    +        } finally {
    +            ThreadPool.terminate(executor, 10, TimeUnit.SECONDS);
    +        }
    +        assertShutdownAndRejectingTasks(executor);
    +    }
    +
    +    private static void assertShutdownAndRejectingTasks(Executor executor) {
    +        final var rejected = new AtomicBoolean();
    +        final var shouldBeRejected = new AbstractRunnable() {
    +            @Override
    +            public void onFailure(Exception e) {
    +                fail("should not call onFailure");
    +            }
    +
    +            @Override
    +            protected void doRun() {
    +                fail("should not call doRun");
    +            }
    +
    +            @Override
    +            public boolean isForceExecution() {
    +                return randomBoolean();
    +            }
    +
    +            @Override
    +            public void onRejection(Exception e) {
    +                assertTrue(asInstanceOf(EsRejectedExecutionException.class, e).isExecutorShutdown());
    +                assertTrue(rejected.compareAndSet(false, true));
    +            }
    +        };
    +        assertTrue(expectThrows(EsRejectedExecutionException.class, () -> executor.execute(shouldBeRejected::doRun)).isExecutorShutdown());
    +        executor.execute(shouldBeRejected);
    +        assertTrue(rejected.get());
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ExecutorScalingQueueTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ExecutorScalingQueueTests.java
    new file mode 100644
    index 0000000000000..b1e1b9d620d2a
    --- /dev/null
    +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ExecutorScalingQueueTests.java
    @@ -0,0 +1,35 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.common.util.concurrent;
    +
    +import org.elasticsearch.test.ESTestCase;
    +
    +import java.util.concurrent.TimeUnit;
    +
    +public class ExecutorScalingQueueTests extends ESTestCase {
    +
    +    public void testPut() {
    +        var queue = new EsExecutors.ExecutorScalingQueue<>();
    +        queue.put(new Object());
    +        assertEquals(queue.size(), 1);
    +    }
    +
    +    public void testAdd() {
    +        var queue = new EsExecutors.ExecutorScalingQueue<>();
    +        assertTrue(queue.add(new Object()));
    +        assertEquals(queue.size(), 1);
    +    }
    +
    +    public void testTimedOffer() {
    +        var queue = new EsExecutors.ExecutorScalingQueue<>();
    +        assertTrue(queue.offer(new Object(), 60, TimeUnit.SECONDS));
    +        assertEquals(queue.size(), 1);
    +    }
    +
    +}
    diff --git a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java
    index ce5841d066d88..016e4df9422d1 100644
    --- a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java
    +++ b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java
    @@ -542,7 +542,7 @@ public PeersResponse read(StreamInput in) throws IOException {
                     }
     
                     @Override
    -                public Executor executor(ThreadPool threadPool) {
    +                public Executor executor() {
                         return TransportResponseHandler.TRANSPORT_WORKER;
                     }
     
    @@ -883,6 +883,48 @@ public boolean innerMatch(LogEvent event) {
             }
         }
     
    +    @TestLogging(reason = "testing logging at WARN level", value = "org.elasticsearch.discovery:WARN")
    +    public void testEventuallyLogsIfReturnedMasterIsUnreachable() {
    +        final DiscoveryNode otherNode = newDiscoveryNode("node-from-hosts-list");
    +        providedAddresses.add(otherNode.getAddress());
    +        transportAddressConnector.addReachableNode(otherNode);
    +
    +        peerFinder.activate(lastAcceptedNodes);
    +        final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + VERBOSITY_INCREASE_TIMEOUT_SETTING.get(Settings.EMPTY).millis()
    +            + PeerFinder.DISCOVERY_FIND_PEERS_INTERVAL_SETTING.get(Settings.EMPTY).millis();
    +
    +        runAllRunnableTasks();
    +
    +        assertFoundPeers(otherNode);
    +        final DiscoveryNode unreachableMaster = newDiscoveryNode("unreachable-master");
    +        transportAddressConnector.unreachableAddresses.add(unreachableMaster.getAddress());
    +
    +        MockLogAppender.assertThatLogger(() -> {
    +            while (deterministicTaskQueue.getCurrentTimeMillis() <= endTime) {
    +                deterministicTaskQueue.advanceTime();
    +                runAllRunnableTasks();
    +                respondToRequests(node -> {
    +                    assertThat(node, is(otherNode));
    +                    return new PeersResponse(Optional.of(unreachableMaster), emptyList(), randomNonNegativeLong());
    +                });
    +            }
    +        },
    +            PeerFinder.class,
    +            new MockLogAppender.SeenEventExpectation(
    +                "discovery result",
    +                "org.elasticsearch.discovery.PeerFinder",
    +                Level.WARN,
    +                "address ["
    +                    + unreachableMaster.getAddress()
    +                    + "]* [current master according to *node-from-hosts-list*ClusterFormationFailureHelper*discovery-troubleshooting.html*"
    +            )
    +        );
    +
    +        assertFoundPeers(otherNode);
    +        assertThat(peerFinder.discoveredMasterNode, nullValue());
    +        assertFalse(peerFinder.discoveredMasterTerm.isPresent());
    +    }
    +
         public void testReconnectsToDisconnectedNodes() {
             final DiscoveryNode otherNode = newDiscoveryNode("original-node");
             providedAddresses.add(otherNode.getAddress());
    diff --git a/server/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java b/server/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
    index d43812a78e131..697d13f73a9e5 100644
    --- a/server/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
    +++ b/server/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
    @@ -11,7 +11,6 @@
     import org.elasticsearch.cluster.routing.RoutingNodes;
     import org.elasticsearch.cluster.routing.ShardRouting;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.core.Strings;
     import org.elasticsearch.index.Index;
    @@ -24,6 +23,7 @@
     import java.util.List;
     import java.util.Map;
     
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassignedInfo;
     import static org.hamcrest.Matchers.greaterThan;
     import static org.mockito.Mockito.mock;
    @@ -33,24 +33,12 @@ public class PriorityComparatorTests extends ESTestCase {
         public void testPreferNewIndices() {
             RoutingNodes.UnassignedShards shards = new RoutingNodes.UnassignedShards(mock(RoutingNodes.class));
             List shardRoutings = Arrays.asList(
    -            TestShardRouting.newShardRouting(
    -                "oldest",
    -                0,
    -                null,
    -                null,
    -                randomBoolean(),
    -                ShardRoutingState.UNASSIGNED,
    +            shardRoutingBuilder("oldest", 0, null, randomBoolean(), ShardRoutingState.UNASSIGNED).withUnassignedInfo(
                     randomUnassignedInfo("foobar")
    -            ),
    -            TestShardRouting.newShardRouting(
    -                "newest",
    -                0,
    -                null,
    -                null,
    -                randomBoolean(),
    -                ShardRoutingState.UNASSIGNED,
    +            ).build(),
    +            shardRoutingBuilder("newest", 0, null, randomBoolean(), ShardRoutingState.UNASSIGNED).withUnassignedInfo(
                     randomUnassignedInfo("foobar")
    -            )
    +            ).build()
             );
             Collections.shuffle(shardRoutings, random());
             for (ShardRouting routing : shardRoutings) {
    @@ -82,24 +70,12 @@ protected IndexMetadata getMetadata(Index index) {
         public void testPreferPriorityIndices() {
             RoutingNodes.UnassignedShards shards = new RoutingNodes.UnassignedShards(mock(RoutingNodes.class));
             List shardRoutings = Arrays.asList(
    -            TestShardRouting.newShardRouting(
    -                "oldest",
    -                0,
    -                null,
    -                null,
    -                randomBoolean(),
    -                ShardRoutingState.UNASSIGNED,
    +            shardRoutingBuilder("oldest", 0, null, randomBoolean(), ShardRoutingState.UNASSIGNED).withUnassignedInfo(
                     randomUnassignedInfo("foobar")
    -            ),
    -            TestShardRouting.newShardRouting(
    -                "newest",
    -                0,
    -                null,
    -                null,
    -                randomBoolean(),
    -                ShardRoutingState.UNASSIGNED,
    +            ).build(),
    +            shardRoutingBuilder("newest", 0, null, randomBoolean(), ShardRoutingState.UNASSIGNED).withUnassignedInfo(
                     randomUnassignedInfo("foobar")
    -            )
    +            ).build()
             );
             Collections.shuffle(shardRoutings, random());
             for (ShardRouting routing : shardRoutings) {
    @@ -131,24 +107,12 @@ protected IndexMetadata getMetadata(Index index) {
         public void testPreferSystemIndices() {
             RoutingNodes.UnassignedShards shards = new RoutingNodes.UnassignedShards(mock(RoutingNodes.class));
             List shardRoutings = Arrays.asList(
    -            TestShardRouting.newShardRouting(
    -                "oldest",
    -                0,
    -                null,
    -                null,
    -                randomBoolean(),
    -                ShardRoutingState.UNASSIGNED,
    +            shardRoutingBuilder("oldest", 0, null, randomBoolean(), ShardRoutingState.UNASSIGNED).withUnassignedInfo(
                     randomUnassignedInfo("foobar")
    -            ),
    -            TestShardRouting.newShardRouting(
    -                "newest",
    -                0,
    -                null,
    -                null,
    -                randomBoolean(),
    -                ShardRoutingState.UNASSIGNED,
    +            ).build(),
    +            shardRoutingBuilder("newest", 0, null, randomBoolean(), ShardRoutingState.UNASSIGNED).withUnassignedInfo(
                     randomUnassignedInfo("foobar")
    -            )
    +            ).build()
             );
             Collections.shuffle(shardRoutings, random());
             for (ShardRouting routing : shardRoutings) {
    @@ -210,15 +174,13 @@ public void testPriorityComparatorSort() {
             for (int i = 0; i < numShards; i++) {
                 IndexMetadata indexMeta = randomFrom(indices);
                 shards.add(
    -                TestShardRouting.newShardRouting(
    +                shardRoutingBuilder(
                         indexMeta.getIndex().getName(),
                         randomIntBetween(1, 5),
                         null,
    -                    null,
                         randomBoolean(),
    -                    ShardRoutingState.UNASSIGNED,
    -                    randomUnassignedInfo("foobar")
    -                )
    +                    ShardRoutingState.UNASSIGNED
    +                ).withUnassignedInfo(randomUnassignedInfo("foobar")).build()
                 );
             }
             shards.sort(new PriorityComparator() {
    diff --git a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
    index 306779e90ef2e..e1cba6f1746e4 100644
    --- a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
    +++ b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
    @@ -61,6 +61,7 @@
     
     import static java.util.Collections.unmodifiableMap;
     import static org.elasticsearch.cluster.routing.RoutingNodesHelper.shardsWithState;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.common.settings.ClusterSettings.createBuiltInClusterSettings;
     import static org.hamcrest.Matchers.empty;
     import static org.hamcrest.Matchers.equalTo;
    @@ -616,14 +617,9 @@ private RoutingAllocation onePrimaryOnNode1And1ReplicaRecovering(AllocationDecid
                         .addIndexShard(
                             new IndexShardRoutingTable.Builder(shardId).addShard(primaryShard)
                                 .addShard(
    -                                TestShardRouting.newShardRouting(
    -                                    shardId,
    -                                    node2.getId(),
    -                                    null,
    -                                    false,
    -                                    ShardRoutingState.INITIALIZING,
    +                                shardRoutingBuilder(shardId, node2.getId(), false, ShardRoutingState.INITIALIZING).withUnassignedInfo(
                                         unassignedInfo
    -                                )
    +                                ).build()
                                 )
                         )
                 )
    diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
    index 4f25e00f8c083..7f5c623dbae08 100644
    --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
    +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
    @@ -1013,7 +1013,6 @@ public void testStopForceClosesConnectionDuringRequest() throws Exception {
             }
         }
     
    -    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103782")
         public void testStopClosesChannelAfterRequest() throws Exception {
             var grace = LONG_GRACE_PERIOD_MS;
             try (var noTimeout = LogExpectation.unexpectedTimeout(grace); var transport = new TestHttpServerTransport(gracePeriod(grace))) {
    @@ -1143,6 +1142,7 @@ public Collection getRestHeaders() {
             return new ActionModule(
                 settings.getSettings(),
                 TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
    +            null,
                 settings.getIndexScopedSettings(),
                 settings.getClusterSettings(),
                 settings.getSettingsFilter(),
    @@ -1389,8 +1389,8 @@ public void assertExpectationsMatched() {
     
             @Override
             public void close() {
    -            appender.stop();
                 Loggers.removeAppender(mockLogger, appender);
    +            appender.stop();
                 if (checked == false) {
                     fail("did not check expectations matched in TimedOutLogExpectation");
                 }
    diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java
    index a22f17702b157..c49da619d7630 100644
    --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java
    +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java
    @@ -20,7 +20,7 @@
     import org.elasticsearch.common.io.stream.BytesStream;
     import org.elasticsearch.common.io.stream.BytesStreamOutput;
     import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput;
    -import org.elasticsearch.common.logging.ChunkedLoggingStreamTests;
    +import org.elasticsearch.common.logging.ChunkedLoggingStreamTestUtils;
     import org.elasticsearch.common.recycler.Recycler;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.common.util.BigArrays;
    @@ -543,12 +543,7 @@ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler rec
                     public String getResponseContentTypeString() {
                         return RestResponse.TEXT_CONTENT_TYPE;
                     }
    -
    -                @Override
    -                public void close() {
    -                    assertTrue(isClosed.compareAndSet(false, true));
    -                }
    -            }));
    +            }, () -> assertTrue(isClosed.compareAndSet(false, true))));
                 @SuppressWarnings("unchecked")
                 Class> listenerClass = (Class>) (Class) ActionListener.class;
                 ArgumentCaptor> listenerCaptor = ArgumentCaptor.forClass(listenerClass);
    @@ -713,7 +708,7 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBody co
             var responseBody = new BytesArray(randomUnicodeOfLengthBetween(1, 100).getBytes(StandardCharsets.UTF_8));
             assertEquals(
                 responseBody,
    -            ChunkedLoggingStreamTests.getDecodedLoggedBody(
    +            ChunkedLoggingStreamTestUtils.getDecodedLoggedBody(
                     LogManager.getLogger(HttpTracerTests.HTTP_BODY_TRACER_LOGGER),
                     Level.TRACE,
                     "[" + request.getRequestId() + "] response body",
    @@ -725,7 +720,7 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBody co
             final var isClosed = new AtomicBoolean();
             assertEquals(
                 responseBody,
    -            ChunkedLoggingStreamTests.getDecodedLoggedBody(
    +            ChunkedLoggingStreamTestUtils.getDecodedLoggedBody(
                     LogManager.getLogger(HttpTracerTests.HTTP_BODY_TRACER_LOGGER),
                     Level.TRACE,
                     "[" + request.getRequestId() + "] response body",
    @@ -750,12 +745,7 @@ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler rec
                         public String getResponseContentTypeString() {
                             return RestResponse.TEXT_CONTENT_TYPE;
                         }
    -
    -                    @Override
    -                    public void close() {
    -                        assertTrue(isClosed.compareAndSet(false, true));
    -                    }
    -                }))
    +                }, () -> assertTrue(isClosed.compareAndSet(false, true))))
                 )
             );
     
    diff --git a/server/src/test/java/org/elasticsearch/http/HttpClientStatsTrackerTests.java b/server/src/test/java/org/elasticsearch/http/HttpClientStatsTrackerTests.java
    index 99e99540489c5..2dfaaf34bb1f1 100644
    --- a/server/src/test/java/org/elasticsearch/http/HttpClientStatsTrackerTests.java
    +++ b/server/src/test/java/org/elasticsearch/http/HttpClientStatsTrackerTests.java
    @@ -16,6 +16,7 @@
     import org.elasticsearch.core.TimeValue;
     import org.elasticsearch.node.Node;
     import org.elasticsearch.rest.RestRequest;
    +import org.elasticsearch.telemetry.metric.MeterRegistry;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.rest.FakeRestRequest;
     import org.elasticsearch.threadpool.ThreadPool;
    @@ -437,7 +438,7 @@ private static class FakeTimeThreadPool extends ThreadPool {
             private final long absoluteTimeOffset = randomLong();
     
             FakeTimeThreadPool() {
    -            super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "test").build());
    +            super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "test").build(), MeterRegistry.NOOP);
                 stopCachedTimeThread();
                 setRandomTime();
             }
    diff --git a/server/src/test/java/org/elasticsearch/http/HttpTracerTests.java b/server/src/test/java/org/elasticsearch/http/HttpTracerTests.java
    index a513ff25f4432..16a902fe4315d 100644
    --- a/server/src/test/java/org/elasticsearch/http/HttpTracerTests.java
    +++ b/server/src/test/java/org/elasticsearch/http/HttpTracerTests.java
    @@ -12,7 +12,7 @@
     import org.apache.logging.log4j.LogManager;
     import org.elasticsearch.common.ReferenceDocs;
     import org.elasticsearch.common.bytes.BytesArray;
    -import org.elasticsearch.common.logging.ChunkedLoggingStreamTests;
    +import org.elasticsearch.common.logging.ChunkedLoggingStreamTestUtils;
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.rest.RestResponse;
     import org.elasticsearch.rest.RestStatus;
    @@ -98,7 +98,7 @@ public void testBodyLogging() {
     
             assertEquals(
                 responseBody,
    -            ChunkedLoggingStreamTests.getDecodedLoggedBody(
    +            ChunkedLoggingStreamTestUtils.getDecodedLoggedBody(
                     LogManager.getLogger(HTTP_BODY_TRACER_LOGGER),
                     Level.TRACE,
                     "[" + request.getRequestId() + "] request body",
    diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
    index 16bf27207c130..1746d91fc20af 100644
    --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
    @@ -175,6 +175,7 @@
     import java.util.stream.StreamSupport;
     
     import static java.util.Collections.shuffle;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.common.lucene.Lucene.indexWriterConfigWithNoMerging;
     import static org.elasticsearch.index.engine.Engine.ES_VERSION;
     import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_RESET;
    @@ -2670,14 +2671,9 @@ public void testSeqNoAndCheckpoints() throws IOException, InterruptedException {
     
             try {
                 initialEngine = createEngine(defaultSettings, store, createTempDir(), newLogMergePolicy(), null);
    -            final ShardRouting primary = TestShardRouting.newShardRouting(
    -                shardId,
    -                "node1",
    -                null,
    -                true,
    -                ShardRoutingState.STARTED,
    +            final ShardRouting primary = shardRoutingBuilder(shardId, "node1", true, ShardRoutingState.STARTED).withAllocationId(
                     allocationId
    -            );
    +            ).build();
                 final ShardRouting initializingReplica = TestShardRouting.newShardRouting(
                     shardId,
                     "node2",
    diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java
    index 3185769bdab82..4d7ea709d565c 100644
    --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java
    +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java
    @@ -61,6 +61,14 @@ public static void pruneTombstones(LiveVersionMap map, long maxTimestampToPrune,
             map.pruneTombstones(maxTimestampToPrune, maxSeqNoToPrune);
         }
     
    +    public static long reclaimableRefreshRamBytes(LiveVersionMap map) {
    +        return map.reclaimableRefreshRamBytes();
    +    }
    +
    +    public static long refreshingBytes(LiveVersionMap map) {
    +        return map.getRefreshingBytes();
    +    }
    +
         public static IndexVersionValue randomIndexVersionValue() {
             return new IndexVersionValue(randomTranslogLocation(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong());
         }
    diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java
    index 5ca7aadc35fa7..8d357413b09cd 100644
    --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java
    @@ -495,4 +495,25 @@ public void testVersionLookupRamBytesUsed() {
                 .sum();
             assertEquals(actualRamBytesUsed, vl.ramBytesUsed());
         }
    +
    +    public void testVersionMapReclaimableRamBytes() throws IOException {
    +        LiveVersionMap map = new LiveVersionMap();
    +        assertEquals(map.ramBytesUsedForRefresh(), 0L);
    +        assertEquals(map.reclaimableRefreshRamBytes(), 0L);
    +        IntStream.range(0, randomIntBetween(10, 100)).forEach(i -> {
    +            BytesRefBuilder uid = new BytesRefBuilder();
    +            uid.copyChars(TestUtil.randomSimpleString(random(), 10, 20));
    +            try (Releasable r = map.acquireLock(uid.toBytesRef())) {
    +                map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue());
    +            }
    +        });
    +        assertThat(map.reclaimableRefreshRamBytes(), greaterThan(0L));
    +        assertEquals(map.reclaimableRefreshRamBytes(), map.ramBytesUsedForRefresh());
    +        map.beforeRefresh();
    +        assertEquals(map.reclaimableRefreshRamBytes(), 0L);
    +        assertThat(map.ramBytesUsedForRefresh(), greaterThan(0L));
    +        map.afterRefresh(randomBoolean());
    +        assertEquals(map.reclaimableRefreshRamBytes(), 0L);
    +        assertEquals(map.ramBytesUsedForRefresh(), 0L);
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java
    index 4abcae1c9de5c..5efdd4c79940c 100644
    --- a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java
    @@ -16,7 +16,6 @@
     import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
     import org.elasticsearch.cluster.routing.ShardRouting;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.core.IOUtils;
     import org.elasticsearch.index.IndexSettings;
    @@ -34,6 +33,7 @@
     import java.util.Collections;
     import java.util.concurrent.atomic.AtomicLong;
     
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.greaterThanOrEqualTo;
     import static org.hamcrest.Matchers.instanceOf;
    @@ -62,7 +62,7 @@ public void testTwoNoopEngines() throws IOException {
         public void testNoopAfterRegularEngine() throws IOException {
             int docs = randomIntBetween(1, 10);
             ReplicationTracker tracker = (ReplicationTracker) engine.config().getGlobalCheckpointSupplier();
    -        ShardRouting routing = TestShardRouting.newShardRouting(shardId, "node", null, true, ShardRoutingState.STARTED, allocationId);
    +        ShardRouting routing = shardRoutingBuilder(shardId, "node", true, ShardRoutingState.STARTED).withAllocationId(allocationId).build();
             IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(shardId).addShard(routing).build();
             tracker.updateFromMaster(1L, Collections.singleton(allocationId.getId()), table);
             tracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED);
    @@ -157,7 +157,7 @@ public void testNoOpEngineStats() throws Exception {
     
         public void testTrimUnreferencedTranslogFiles() throws Exception {
             final ReplicationTracker tracker = (ReplicationTracker) engine.config().getGlobalCheckpointSupplier();
    -        ShardRouting routing = TestShardRouting.newShardRouting(shardId, "node", null, true, ShardRoutingState.STARTED, allocationId);
    +        ShardRouting routing = shardRoutingBuilder(shardId, "node", true, ShardRoutingState.STARTED).withAllocationId(allocationId).build();
             IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(shardId).addShard(routing).build();
             tracker.updateFromMaster(1L, Collections.singleton(allocationId.getId()), table);
             tracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED);
    diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java
    index 6a87c0f704600..6577148d78c7b 100644
    --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java
    @@ -9,8 +9,8 @@
     package org.elasticsearch.index.fieldstats;
     
     import org.elasticsearch.action.DocWriteResponse;
    -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
     import org.elasticsearch.action.search.SearchType;
    +import org.elasticsearch.action.support.broadcast.BroadcastResponse;
     import org.elasticsearch.index.query.QueryBuilders;
     import org.elasticsearch.indices.IndicesRequestCache;
     import org.elasticsearch.rest.RestStatus;
    @@ -88,7 +88,7 @@ private void assertRequestCacheStats(long expectedHits, long expectedMisses) {
         }
     
         private void refreshIndex() {
    -        RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("index").get();
    +        BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("index").get();
             assertThat(refreshResponse.getSuccessfulShards(), equalTo(refreshResponse.getSuccessfulShards()));
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
    index 6d80be5167e52..70e2fee7a003a 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
    @@ -264,7 +264,7 @@ public List invalidExample() throws IOException {
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             // Just assert that we expect a boolean. Otherwise no munging.
             return v -> (Boolean) v;
         }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
    index 41628dac2faba..9e9437aa6b9db 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
    @@ -706,7 +706,7 @@ public void execute() {
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             return v -> ((Number) v).longValue();
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java
    index f0458add93c78..aab481b545879 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java
    @@ -62,8 +62,8 @@ public void testAddFields() throws Exception {
                 b.endObject();
             }));
     
    -        MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
    -        Mapping merged = MapperService.mergeMappings(stage1, stage2.mapping(), reason);
    +        MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE, MergeReason.MAPPING_AUTO_UPDATE);
    +        Mapping merged = MapperService.mergeMappings(stage1, stage2.mapping(), reason, Long.MAX_VALUE);
             // stage1 mapping should not have been modified
             assertThat(stage1.mappers().getMapper("age"), nullValue());
             assertThat(stage1.mappers().getMapper("obj1.prop1"), nullValue());
    @@ -81,26 +81,31 @@ public void testMergeObjectDynamic() throws Exception {
             DocumentMapper withDynamicMapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "false")));
             assertThat(withDynamicMapper.mapping().getRoot().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
     
    -        Mapping merged = MapperService.mergeMappings(mapper, withDynamicMapper.mapping(), MergeReason.MAPPING_UPDATE);
    +        Mapping merged = MapperService.mergeMappings(
    +            mapper,
    +            withDynamicMapper.mapping(),
    +            randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.MAPPING_AUTO_UPDATE),
    +            Long.MAX_VALUE
    +        );
             assertThat(merged.getRoot().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
         }
     
         public void testMergeObjectAndNested() throws Exception {
             DocumentMapper objectMapper = createDocumentMapper(mapping(b -> b.startObject("obj").field("type", "object").endObject()));
             DocumentMapper nestedMapper = createDocumentMapper(mapping(b -> b.startObject("obj").field("type", "nested").endObject()));
    -        MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
    +        MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE, MergeReason.MAPPING_AUTO_UPDATE);
     
             {
                 IllegalArgumentException e = expectThrows(
                     IllegalArgumentException.class,
    -                () -> MapperService.mergeMappings(objectMapper, nestedMapper.mapping(), reason)
    +                () -> MapperService.mergeMappings(objectMapper, nestedMapper.mapping(), reason, Long.MAX_VALUE)
                 );
                 assertThat(e.getMessage(), containsString("can't merge a non-nested mapping [obj] with a nested mapping"));
             }
             {
                 IllegalArgumentException e = expectThrows(
                     IllegalArgumentException.class,
    -                () -> MapperService.mergeMappings(nestedMapper, objectMapper.mapping(), reason)
    +                () -> MapperService.mergeMappings(nestedMapper, objectMapper.mapping(), reason, Long.MAX_VALUE)
                 );
                 assertThat(e.getMessage(), containsString("can't merge a non-nested mapping [obj] with a nested mapping"));
             }
    @@ -178,7 +183,11 @@ public void testConcurrentMergeTest() throws Throwable {
                         Mapping update = doc.dynamicMappingsUpdate();
                         assert update != null;
                         lastIntroducedFieldName.set(fieldName);
    -                    mapperService.merge("_doc", new CompressedXContent(update.toString()), MergeReason.MAPPING_UPDATE);
    +                    mapperService.merge(
    +                        "_doc",
    +                        new CompressedXContent(update.toString()),
    +                        randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.MAPPING_AUTO_UPDATE)
    +                    );
                     }
                 } catch (Exception e) {
                     error.set(e);
    @@ -235,11 +244,21 @@ public void testMergeMeta() throws IOException {
     
             DocumentMapper updatedMapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text")));
     
    -        Mapping merged = MapperService.mergeMappings(initMapper, updatedMapper.mapping(), MergeReason.MAPPING_UPDATE);
    +        Mapping merged = MapperService.mergeMappings(
    +            initMapper,
    +            updatedMapper.mapping(),
    +            randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.MAPPING_AUTO_UPDATE),
    +            Long.MAX_VALUE
    +        );
             assertThat(merged.getMeta().get("foo"), equalTo("bar"));
     
             updatedMapper = createDocumentMapper(topMapping(b -> b.startObject("_meta").field("foo", "new_bar").endObject()));
    -        merged = MapperService.mergeMappings(initMapper, updatedMapper.mapping(), MergeReason.MAPPING_UPDATE);
    +        merged = MapperService.mergeMappings(
    +            initMapper,
    +            updatedMapper.mapping(),
    +            randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.MAPPING_AUTO_UPDATE),
    +            Long.MAX_VALUE
    +        );
             assertThat(merged.getMeta().get("foo"), equalTo("new_bar"));
         }
     
    @@ -262,7 +281,7 @@ public void testMergeMetaForIndexTemplate() throws IOException {
             assertThat(initMapper.mapping().getMeta(), equalTo(expected));
     
             DocumentMapper updatedMapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text")));
    -        Mapping merged = MapperService.mergeMappings(initMapper, updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE);
    +        Mapping merged = MapperService.mergeMappings(initMapper, updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE);
             assertThat(merged.getMeta(), equalTo(expected));
     
             updatedMapper = createDocumentMapper(topMapping(b -> {
    @@ -278,7 +297,7 @@ public void testMergeMetaForIndexTemplate() throws IOException {
                 }
                 b.endObject();
             }));
    -        merged = merged.merge(updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE);
    +        merged = merged.merge(updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE);
     
             expected = Map.of("field", "value", "object", Map.of("field1", "value1", "field2", "new_value", "field3", "value3"));
             assertThat(merged.getMeta(), equalTo(expected));
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java
    index 07f4c3c1346c4..f816f403be89f 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java
    @@ -34,6 +34,7 @@ public void testParsing() throws IOException {
             );
             DocumentMapper mapper = createDocumentMapper(mapping);
             assertEquals(mapping, mapper.mappingSource().toString());
    +        assertEquals(2, mapper.mapping().getRoot().mapperSize());
         }
     
         public void testParsingWithMissingPath() {
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java
    index 511ce94a09eec..3798129ccff29 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java
    @@ -56,7 +56,7 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed)
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             return v -> {
                 // The test converts the float into a string so we do do
                 Number n = (Number) v;
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
    index 3da059803014f..a5d705076561b 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
    @@ -598,6 +598,11 @@ public void testScriptAndPrecludedParameters() {
     
         @Override
         protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) {
    +        return syntheticSourceSupport(ignoreMalformed, false);
    +    }
    +
    +    @Override
    +    protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed, boolean columnReader) {
             assumeFalse("synthetic _source for geo_point doesn't support ignore_malformed", ignoreMalformed);
             return new SyntheticSourceSupport() {
                 private final boolean ignoreZValue = usually();
    @@ -607,6 +612,9 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed)
                 public SyntheticSourceExample example(int maxVals) {
                     if (randomBoolean()) {
                         Tuple v = generateValue();
    +                    if (columnReader) {
    +                        return new SyntheticSourceExample(v.v1(), decode(encode(v.v2())), encode(v.v2()), this::mapping);
    +                    }
                         return new SyntheticSourceExample(v.v1(), v.v2(), v.v2().toWKT(), this::mapping);
                     }
                     List> values = randomList(1, maxVals, this::generateValue);
    @@ -616,12 +624,20 @@ public SyntheticSourceExample example(int maxVals) {
                         .sorted((a, b) -> Long.compare(encode(a.v2()), encode(b.v2())))
                         .toList();
                     List in = sorted.stream().map(Tuple::v1).toList();
    -                List outList = sorted.stream().map(v -> encode(v.v2())).sorted().map(this::decode).toList();
    +                List outList = sorted.stream().map(Tuple::v2).toList();
                     Object out = outList.size() == 1 ? outList.get(0) : outList;
     
    -                List outBlockList = outList.stream().map(GeoPoint::toWKT).toList();
    -                Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList;
    -                return new SyntheticSourceExample(in, out, outBlock, this::mapping);
    +                if (columnReader) {
    +                    // When reading doc-values, the block is a list of encoded longs
    +                    List outBlockList = outList.stream().map(this::encode).toList();
    +                    Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList;
    +                    return new SyntheticSourceExample(in, out, outBlock, this::mapping);
    +                } else {
    +                    // When reading row-stride, the block is a list of WKT encoded BytesRefs
    +                    List outBlockList = outList.stream().map(GeoPoint::toWKT).toList();
    +                    Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList;
    +                    return new SyntheticSourceExample(in, out, outBlock, this::mapping);
    +                }
                 }
     
                 private Tuple generateValue() {
    @@ -705,13 +721,18 @@ protected IngestScriptSupport ingestScriptSupport() {
             throw new AssumptionViolatedException("not supported");
         }
     
    -    private boolean useDocValues = false;
    +    @Override
    +    protected Function loadBlockExpected() {
    +        throw new IllegalStateException("Should never reach here, call loadBlockExpected(BlockReaderSupport, boolean) instead");
    +    }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    -        if (useDocValues) {
    +    protected Function loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) {
    +        if (columnReader) {
    +            // When using column reader, we expect the output to be doc-values (which means encoded longs)
                 return v -> asJacksonNumberOutput(((Number) v).longValue());
             } else {
    +            // When using row-stride reader, we expect the output to be WKT encoded BytesRef
                 return v -> asWKT((BytesRef) v);
             }
         }
    @@ -732,13 +753,8 @@ protected static Object asWKT(BytesRef value) {
         }
     
         @Override
    -    protected boolean supportsColumnAtATimeReader(MapperService mapper, MappedFieldType ft) {
    -        // Currently ESQL support for geo_point is limited to source values
    -        return false;
    -    }
    -
    -    @Override
    -    public void testBlockLoaderFromRowStrideReaderWithSyntheticSource() {
    -        assumeTrue("Synthetic source not completed supported for geo_point", false);
    +    protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) {
    +        MappedFieldType ft = mapper.fieldType(loaderFieldName);
    +        return new BlockReaderSupport(ft.hasDocValues(), false, mapper, loaderFieldName);
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java
    index a0545308c3928..cc024efb5f307 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java
    @@ -56,7 +56,7 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed)
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             return v -> {
                 // The test converts the float into a string so we do do
                 Number n = (Number) v;
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java
    index 7aa68a6949b7e..ba9c2e6c4a299 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java
    @@ -423,7 +423,7 @@ public void execute() {
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             return v -> InetAddresses.toAddrString(InetAddressPoint.decode(BytesRef.deepCopyOf((BytesRef) v).bytes));
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
    index 983054df2fbe7..892dbcb185bdb 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
    @@ -650,7 +650,7 @@ protected boolean supportsIgnoreMalformed() {
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             return v -> ((BytesRef) v).utf8ToString();
         }
     
    @@ -684,10 +684,14 @@ static class KeywordSyntheticSourceSupport implements SyntheticSourceSupport {
     
             @Override
             public SyntheticSourceExample example(int maxValues) {
    +            return example(maxValues, false);
    +        }
    +
    +        public SyntheticSourceExample example(int maxValues, boolean loadBlockFromSource) {
                 if (randomBoolean()) {
                     Tuple v = generateValue();
                     Object loadBlock = v.v2();
    -                if (ignoreAbove != null && v.v2().length() > ignoreAbove) {
    +                if (loadBlockFromSource == false && ignoreAbove != null && v.v2().length() > ignoreAbove) {
                         loadBlock = null;
                     }
                     return new SyntheticSourceExample(v.v1(), v.v2(), loadBlock, this::mapping);
    @@ -704,9 +708,15 @@ public SyntheticSourceExample example(int maxValues) {
                     }
                 });
                 List outList = store ? outPrimary : new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList());
    -            List loadBlock = docValues
    -                ? new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList())
    -                : List.copyOf(outList);
    +            List loadBlock;
    +            if (loadBlockFromSource) {
    +                // The block loader infrastructure will never return nulls. Just zap them all.
    +                loadBlock = in.stream().filter(m -> m != null).toList();
    +            } else if (docValues) {
    +                loadBlock = new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList());
    +            } else {
    +                loadBlock = List.copyOf(outList);
    +            }
                 Object loadBlockResult = loadBlock.size() == 1 ? loadBlock.get(0) : loadBlock;
                 outList.addAll(outExtraValues);
                 Object out = outList.size() == 1 ? outList.get(0) : outList;
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java
    index 0e4502a813c15..f2d4431e5c79f 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java
    @@ -122,7 +122,7 @@ public void testFetchCoerced() throws IOException {
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             return n -> {
                 Number number = ((Number) n);
                 if (Integer.MIN_VALUE <= number.longValue() && number.longValue() <= Integer.MAX_VALUE) {
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java
    new file mode 100644
    index 0000000000000..9c38487dbdf7b
    --- /dev/null
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java
    @@ -0,0 +1,32 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.index.mapper;
    +
    +import org.elasticsearch.test.ESTestCase;
    +
    +public class MapperMergeContextTests extends ESTestCase {
    +
    +    public void testAddFieldIfPossibleUnderLimit() {
    +        MapperMergeContext context = MapperMergeContext.root(false, false, 1);
    +        assertTrue(context.decrementFieldBudgetIfPossible(1));
    +        assertFalse(context.decrementFieldBudgetIfPossible(1));
    +    }
    +
    +    public void testAddFieldIfPossibleAtLimit() {
    +        MapperMergeContext context = MapperMergeContext.root(false, false, 0);
    +        assertFalse(context.decrementFieldBudgetIfPossible(1));
    +    }
    +
    +    public void testAddFieldIfPossibleUnlimited() {
    +        MapperMergeContext context = MapperMergeContext.root(false, false, Long.MAX_VALUE);
    +        assertTrue(context.decrementFieldBudgetIfPossible(Integer.MAX_VALUE));
    +        assertTrue(context.decrementFieldBudgetIfPossible(Integer.MAX_VALUE));
    +    }
    +
    +}
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
    index aa3b083f4496f..80c074918b06d 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
    @@ -37,9 +37,13 @@ public class MapperServiceTests extends MapperServiceTestCase {
     
         public void testPreflightUpdateDoesNotChangeMapping() throws Throwable {
             final MapperService mapperService = createMapperService(mapping(b -> {}));
    -        merge(mapperService, MergeReason.MAPPING_UPDATE_PREFLIGHT, mapping(b -> createMappingSpecifyingNumberOfFields(b, 1)));
    +        merge(mapperService, MergeReason.MAPPING_AUTO_UPDATE_PREFLIGHT, mapping(b -> createMappingSpecifyingNumberOfFields(b, 1)));
             assertThat("field was not created by preflight check", mapperService.fieldType("field0"), nullValue());
    -        merge(mapperService, MergeReason.MAPPING_UPDATE, mapping(b -> createMappingSpecifyingNumberOfFields(b, 1)));
    +        merge(
    +            mapperService,
    +            randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.MAPPING_AUTO_UPDATE),
    +            mapping(b -> createMappingSpecifyingNumberOfFields(b, 1))
    +        );
             assertThat("field was not created by mapping update", mapperService.fieldType("field0"), notNullValue());
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
    index 621f03813b1ff..61d62c1e41969 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
    @@ -13,6 +13,7 @@
     import org.elasticsearch.common.bytes.BytesReference;
     import org.elasticsearch.common.compress.CompressedXContent;
     import org.elasticsearch.common.settings.Settings;
    +import org.elasticsearch.core.CheckedConsumer;
     import org.elasticsearch.index.IndexVersion;
     import org.elasticsearch.index.IndexVersions;
     import org.elasticsearch.index.mapper.MapperService.MergeReason;
    @@ -1508,16 +1509,47 @@ public void testMergeNested() {
     
             MapperException e = expectThrows(
                 MapperException.class,
    -            () -> firstMapper.merge(secondMapper, MapperBuilderContext.root(false, false))
    +            () -> firstMapper.merge(secondMapper, MapperMergeContext.root(false, false, Long.MAX_VALUE))
             );
             assertThat(e.getMessage(), containsString("[include_in_parent] parameter can't be updated on a nested object mapping"));
     
             NestedObjectMapper result = (NestedObjectMapper) firstMapper.merge(
                 secondMapper,
                 MapperService.MergeReason.INDEX_TEMPLATE,
    -            MapperBuilderContext.root(false, false)
    +            MapperMergeContext.root(false, false, Long.MAX_VALUE)
             );
             assertFalse(result.isIncludeInParent());
             assertTrue(result.isIncludeInRoot());
         }
    +
    +    public void testWithoutMappers() throws IOException {
    +        ObjectMapper shallowObject = createNestedObjectMapperWithAllParametersSet(b -> {});
    +        ObjectMapper object = createNestedObjectMapperWithAllParametersSet(b -> {
    +            b.startObject("keyword");
    +            {
    +                b.field("type", "keyword");
    +            }
    +            b.endObject();
    +        });
    +        assertThat(object.withoutMappers().toString(), equalTo(shallowObject.toString()));
    +    }
    +
    +    private NestedObjectMapper createNestedObjectMapperWithAllParametersSet(CheckedConsumer propertiesBuilder)
    +        throws IOException {
    +        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
    +            b.startObject("nested_object");
    +            {
    +                b.field("type", "nested");
    +                b.field("enabled", false);
    +                b.field("dynamic", false);
    +                b.field("include_in_parent", true);
    +                b.field("include_in_root", true);
    +                b.startObject("properties");
    +                propertiesBuilder.accept(b);
    +                b.endObject();
    +            }
    +            b.endObject();
    +        }));
    +        return (NestedObjectMapper) mapper.mapping().getRoot().getMapper("nested_object");
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
    index 2db0203cb9383..7b91c84a05c53 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
    @@ -382,7 +382,7 @@ public void testAllowMultipleValuesField() throws IOException {
         }
     
         @Override
    -    protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) {
    +    protected Function loadBlockExpected() {
             return n -> ((Number) n); // Just assert it's a number
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java
    index 559eb4712d7c1..8eb824884a591 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java
    @@ -41,7 +41,7 @@ public void testMerge() {
             ObjectMapper mergeWith = createMapping(false, true, true, true);
     
             // WHEN merging mappings
    -        final ObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false));
    +        final ObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE));
     
             // THEN "baz" new field is added to merged mapping
             final ObjectMapper mergedFoo = (ObjectMapper) merged.getMapper("foo");
    @@ -63,7 +63,7 @@ public void testMergeWhenDisablingField() {
             // THEN a MapperException is thrown with an excepted message
             MapperException e = expectThrows(
                 MapperException.class,
    -            () -> rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false))
    +            () -> rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE))
             );
             assertEquals("the [enabled] parameter can't be updated for the object mapping [foo]", e.getMessage());
         }
    @@ -75,7 +75,10 @@ public void testMergeDisabledField() {
                 new ObjectMapper.Builder("disabled", Explicit.IMPLICIT_TRUE)
             ).build(MapperBuilderContext.root(false, false));
     
    -        RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false));
    +        RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(
    +            mergeWith,
    +            MapperMergeContext.root(false, false, Long.MAX_VALUE)
    +        );
             assertFalse(((ObjectMapper) merged.getMapper("disabled")).isEnabled());
         }
     
    @@ -84,14 +87,14 @@ public void testMergeEnabled() {
     
             MapperException e = expectThrows(
                 MapperException.class,
    -            () -> rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false))
    +            () -> rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE))
             );
             assertEquals("the [enabled] parameter can't be updated for the object mapping [disabled]", e.getMessage());
     
             ObjectMapper result = rootObjectMapper.merge(
                 mergeWith,
                 MapperService.MergeReason.INDEX_TEMPLATE,
    -            MapperBuilderContext.root(false, false)
    +            MapperMergeContext.root(false, false, Long.MAX_VALUE)
             );
             assertTrue(result.isEnabled());
         }
    @@ -106,14 +109,14 @@ public void testMergeEnabledForRootMapper() {
     
             MapperException e = expectThrows(
                 MapperException.class,
    -            () -> firstMapper.merge(secondMapper, MapperBuilderContext.root(false, false))
    +            () -> firstMapper.merge(secondMapper, MapperMergeContext.root(false, false, Long.MAX_VALUE))
             );
             assertEquals("the [enabled] parameter can't be updated for the object mapping [" + type + "]", e.getMessage());
     
             ObjectMapper result = firstMapper.merge(
                 secondMapper,
                 MapperService.MergeReason.INDEX_TEMPLATE,
    -            MapperBuilderContext.root(false, false)
    +            MapperMergeContext.root(false, false, Long.MAX_VALUE)
             );
             assertFalse(result.isEnabled());
         }
    @@ -128,7 +131,10 @@ public void testMergeDisabledRootMapper() {
                 Collections.singletonMap("test", new TestRuntimeField("test", "long"))
             ).build(MapperBuilderContext.root(false, false));
     
    -        RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false));
    +        RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(
    +            mergeWith,
    +            MapperMergeContext.root(false, false, Long.MAX_VALUE)
    +        );
             assertFalse(merged.isEnabled());
             assertEquals(1, merged.runtimeFields().size());
             assertEquals("test", merged.runtimeFields().iterator().next().name());
    @@ -138,7 +144,7 @@ public void testMergedFieldNamesFieldWithDotsSubobjectsFalseAtRoot() {
             RootObjectMapper mergeInto = createRootSubobjectFalseLeafWithDots();
             RootObjectMapper mergeWith = createRootSubobjectFalseLeafWithDots();
     
    -        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false));
    +        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE));
     
             final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) merged.getMapper("host.name");
             assertEquals("host.name", keywordFieldMapper.name());
    @@ -153,7 +159,7 @@ public void testMergedFieldNamesFieldWithDotsSubobjectsFalse() {
                 createObjectSubobjectsFalseLeafWithDots()
             ).build(MapperBuilderContext.root(false, false));
     
    -        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false));
    +        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE));
     
             ObjectMapper foo = (ObjectMapper) merged.getMapper("foo");
             ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics");
    @@ -168,7 +174,7 @@ public void testMergedFieldNamesMultiFields() {
             RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(createTextKeywordMultiField("text"))
                 .build(MapperBuilderContext.root(false, false));
     
    -        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false));
    +        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE));
     
             TextFieldMapper text = (TextFieldMapper) merged.getMapper("text");
             assertEquals("text", text.name());
    @@ -186,7 +192,7 @@ public void testMergedFieldNamesMultiFieldsWithinSubobjectsFalse() {
                 createObjectSubobjectsFalseLeafWithMultiField()
             ).build(MapperBuilderContext.root(false, false));
     
    -        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false));
    +        final ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE));
     
             ObjectMapper foo = (ObjectMapper) merged.getMapper("foo");
             ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics");
    @@ -198,6 +204,113 @@ public void testMergedFieldNamesMultiFieldsWithinSubobjectsFalse() {
             assertEquals("keyword", fieldMapper.simpleName());
         }
     
    +    public void testMergeWithLimit() {
    +        // GIVEN an enriched mapping with "baz" new field
    +        ObjectMapper mergeWith = createMapping(false, true, true, true);
    +
    +        // WHEN merging mappings
    +        final ObjectMapper mergedAdd0 = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, 0));
    +        final ObjectMapper mergedAdd1 = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, 1));
    +
    +        // THEN "baz" new field is added to merged mapping
    +        assertEquals(3, rootObjectMapper.mapperSize());
    +        assertEquals(4, mergeWith.mapperSize());
    +        assertEquals(3, mergedAdd0.mapperSize());
    +        assertEquals(4, mergedAdd1.mapperSize());
    +    }
    +
    +    public void testMergeWithLimitTruncatedObjectField() {
    +        RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).build(MapperBuilderContext.root(false, false));
    +        RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(
    +            new ObjectMapper.Builder("parent", Explicit.IMPLICIT_FALSE).add(
    +                new KeywordFieldMapper.Builder("child1", IndexVersion.current())
    +            ).add(new KeywordFieldMapper.Builder("child2", IndexVersion.current()))
    +        ).build(MapperBuilderContext.root(false, false));
    +
    +        ObjectMapper mergedAdd0 = root.merge(mergeWith, MapperMergeContext.root(false, false, 0));
    +        ObjectMapper mergedAdd1 = root.merge(mergeWith, MapperMergeContext.root(false, false, 1));
    +        ObjectMapper mergedAdd2 = root.merge(mergeWith, MapperMergeContext.root(false, false, 2));
    +        ObjectMapper mergedAdd3 = root.merge(mergeWith, MapperMergeContext.root(false, false, 3));
    +        assertEquals(0, root.mapperSize());
    +        assertEquals(0, mergedAdd0.mapperSize());
    +        assertEquals(1, mergedAdd1.mapperSize());
    +        assertEquals(2, mergedAdd2.mapperSize());
    +        assertEquals(3, mergedAdd3.mapperSize());
    +
    +        ObjectMapper parent1 = (ObjectMapper) mergedAdd1.getMapper("parent");
    +        assertNull(parent1.getMapper("child1"));
    +        assertNull(parent1.getMapper("child2"));
    +
    +        ObjectMapper parent2 = (ObjectMapper) mergedAdd2.getMapper("parent");
    +        // the order is not deterministic, but we expect one to be null and the other to be non-null
    +        assertTrue(parent2.getMapper("child1") == null ^ parent2.getMapper("child2") == null);
    +
    +        ObjectMapper parent3 = (ObjectMapper) mergedAdd3.getMapper("parent");
    +        assertNotNull(parent3.getMapper("child1"));
    +        assertNotNull(parent3.getMapper("child2"));
    +    }
    +
    +    public void testMergeSameObjectDifferentFields() {
    +        RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(
    +            new ObjectMapper.Builder("parent", Explicit.IMPLICIT_TRUE).add(new KeywordFieldMapper.Builder("child1", IndexVersion.current()))
    +        ).build(MapperBuilderContext.root(false, false));
    +        RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(
    +            new ObjectMapper.Builder("parent", Explicit.IMPLICIT_TRUE).add(
    +                new KeywordFieldMapper.Builder("child1", IndexVersion.current()).ignoreAbove(42)
    +            ).add(new KeywordFieldMapper.Builder("child2", IndexVersion.current()))
    +        ).build(MapperBuilderContext.root(false, false));
    +
    +        ObjectMapper mergedAdd0 = root.merge(mergeWith, MapperMergeContext.root(false, false, 0));
    +        ObjectMapper mergedAdd1 = root.merge(mergeWith, MapperMergeContext.root(false, false, 1));
    +        assertEquals(2, root.mapperSize());
    +        assertEquals(2, mergedAdd0.mapperSize());
    +        assertEquals(3, mergedAdd1.mapperSize());
    +
    +        ObjectMapper parent0 = (ObjectMapper) mergedAdd0.getMapper("parent");
    +        assertNotNull(parent0.getMapper("child1"));
    +        assertEquals(42, ((KeywordFieldMapper) parent0.getMapper("child1")).fieldType().ignoreAbove());
    +        assertNull(parent0.getMapper("child2"));
    +
    +        ObjectMapper parent1 = (ObjectMapper) mergedAdd1.getMapper("parent");
    +        assertNotNull(parent1.getMapper("child1"));
    +        assertEquals(42, ((KeywordFieldMapper) parent1.getMapper("child1")).fieldType().ignoreAbove());
    +        assertNotNull(parent1.getMapper("child2"));
    +    }
    +
    +    public void testMergeWithLimitMultiField() {
    +        RootObjectMapper mergeInto = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(
    +            createTextKeywordMultiField("text", "keyword1")
    +        ).build(MapperBuilderContext.root(false, false));
    +        RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(
    +            createTextKeywordMultiField("text", "keyword2")
    +        ).build(MapperBuilderContext.root(false, false));
    +
    +        assertEquals(2, mergeInto.mapperSize());
    +        assertEquals(2, mergeWith.mapperSize());
    +
    +        ObjectMapper mergedAdd0 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 0));
    +        ObjectMapper mergedAdd1 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 1));
    +        assertEquals(2, mergedAdd0.mapperSize());
    +        assertEquals(3, mergedAdd1.mapperSize());
    +    }
    +
    +    public void testMergeWithLimitRuntimeField() {
    +        RootObjectMapper mergeInto = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).addRuntimeField(
    +            new TestRuntimeField("existing_runtime_field", "keyword")
    +        ).add(createTextKeywordMultiField("text", "keyword1")).build(MapperBuilderContext.root(false, false));
    +        RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).addRuntimeField(
    +            new TestRuntimeField("existing_runtime_field", "keyword")
    +        ).addRuntimeField(new TestRuntimeField("new_runtime_field", "keyword")).build(MapperBuilderContext.root(false, false));
    +
    +        assertEquals(3, mergeInto.mapperSize());
    +        assertEquals(2, mergeWith.mapperSize());
    +
    +        ObjectMapper mergedAdd0 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 0));
    +        ObjectMapper mergedAdd1 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 1));
    +        assertEquals(3, mergedAdd0.mapperSize());
    +        assertEquals(4, mergedAdd1.mapperSize());
    +    }
    +
         private static RootObjectMapper createRootSubobjectFalseLeafWithDots() {
             FieldMapper.Builder fieldBuilder = new KeywordFieldMapper.Builder("host.name", IndexVersion.current());
             FieldMapper fieldMapper = fieldBuilder.build(MapperBuilderContext.root(false, false));
    @@ -231,8 +344,12 @@ private ObjectMapper.Builder createObjectSubobjectsFalseLeafWithMultiField() {
         }
     
         private TextFieldMapper.Builder createTextKeywordMultiField(String name) {
    +        return createTextKeywordMultiField(name, "keyword");
    +    }
    +
    +    private TextFieldMapper.Builder createTextKeywordMultiField(String name, String multiFieldName) {
             TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers());
    -        builder.multiFieldsBuilder.add(new KeywordFieldMapper.Builder("keyword", IndexVersion.current()));
    +        builder.multiFieldsBuilder.add(new KeywordFieldMapper.Builder(multiFieldName, IndexVersion.current()));
             return builder;
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
    index 3c77bf20b37d2..6e958ddbea904 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
    @@ -8,14 +8,17 @@
     
     package org.elasticsearch.index.mapper;
     
    +import org.elasticsearch.common.Explicit;
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.bytes.BytesArray;
     import org.elasticsearch.common.bytes.BytesReference;
     import org.elasticsearch.common.compress.CompressedXContent;
     import org.elasticsearch.common.settings.Settings;
    +import org.elasticsearch.core.CheckedConsumer;
     import org.elasticsearch.index.IndexVersion;
     import org.elasticsearch.index.mapper.MapperService.MergeReason;
     import org.elasticsearch.index.mapper.ObjectMapper.Dynamic;
    +import org.elasticsearch.xcontent.XContentBuilder;
     import org.elasticsearch.xcontent.XContentFactory;
     import org.elasticsearch.xcontent.XContentType;
     
    @@ -23,6 +26,7 @@
     import java.util.List;
     
     import static org.hamcrest.Matchers.containsString;
    +import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.instanceOf;
     import static org.hamcrest.Matchers.nullValue;
     
    @@ -123,7 +127,7 @@ public void testMerge() throws IOException {
                 "_doc",
                 new CompressedXContent(BytesReference.bytes(topMapping(b -> b.field("dynamic", "strict"))))
             );
    -        Mapping merged = mapper.mapping().merge(mergeWith, reason);
    +        Mapping merged = mapper.mapping().merge(mergeWith, reason, Long.MAX_VALUE);
             assertEquals(Dynamic.STRICT, merged.getRoot().dynamic());
         }
     
    @@ -468,7 +472,7 @@ public void testSubobjectsCannotBeUpdated() throws IOException {
             }))));
             MapperException exception = expectThrows(
                 MapperException.class,
    -            () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE)
    +            () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE)
             );
             assertEquals("the [subobjects] parameter can't be updated for the object mapping [field]", exception.getMessage());
         }
    @@ -482,7 +486,7 @@ public void testSubobjectsCannotBeUpdatedOnRoot() throws IOException {
             }))));
             MapperException exception = expectThrows(
                 MapperException.class,
    -            () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE)
    +            () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE)
             );
             assertEquals("the [subobjects] parameter can't be updated for the object mapping [_doc]", exception.getMessage());
         }
    @@ -525,4 +529,45 @@ public void testSyntheticSourceDocValuesFieldWithout() throws IOException {
             assertThat(o.syntheticFieldLoader().docValuesLoader(null, null), nullValue());
             assertThat(mapper.mapping().getRoot().syntheticFieldLoader().docValuesLoader(null, null), nullValue());
         }
    +
    +    public void testNestedObjectWithMultiFieldsMapperSize() throws IOException {
    +        ObjectMapper.Builder mapperBuilder = new ObjectMapper.Builder("parent_size_1", Explicit.IMPLICIT_TRUE).add(
    +            new ObjectMapper.Builder("child_size_2", Explicit.IMPLICIT_TRUE).add(
    +                new TextFieldMapper.Builder("grand_child_size_3", createDefaultIndexAnalyzers()).addMultiField(
    +                    new KeywordFieldMapper.Builder("multi_field_size_4", IndexVersion.current())
    +                ).addMultiField(new KeywordFieldMapper.Builder("multi_field_size_5", IndexVersion.current()))
    +            )
    +        );
    +        assertThat(mapperBuilder.build(MapperBuilderContext.root(false, false)).mapperSize(), equalTo(5));
    +    }
    +
    +    public void testWithoutMappers() throws IOException {
    +        ObjectMapper shallowObject = createObjectMapperWithAllParametersSet(b -> {});
    +        ObjectMapper object = createObjectMapperWithAllParametersSet(b -> {
    +            b.startObject("keyword");
    +            {
    +                b.field("type", "keyword");
    +            }
    +            b.endObject();
    +        });
    +        assertThat(object.withoutMappers().toString(), equalTo(shallowObject.toString()));
    +    }
    +
    +    private ObjectMapper createObjectMapperWithAllParametersSet(CheckedConsumer propertiesBuilder)
    +        throws IOException {
    +        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
    +            b.startObject("object");
    +            {
    +                b.field("type", "object");
    +                b.field("subobjects", false);
    +                b.field("enabled", false);
    +                b.field("dynamic", false);
    +                b.startObject("properties");
    +                propertiesBuilder.accept(b);
    +                b.endObject();
    +            }
    +            b.endObject();
    +        }));
    +        return (ObjectMapper) mapper.mapping().getRoot().getMapper("object");
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java
    index 9b447d0727152..562a30ba4f389 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java
    @@ -345,7 +345,7 @@ public void testMerging() {
                 {"type":"test_mapper","fixed":true,"fixed2":true,"required":"value"}""");
             IllegalArgumentException e = expectThrows(
                 IllegalArgumentException.class,
    -            () -> mapper.merge(badMerge, MapperBuilderContext.root(false, false))
    +            () -> mapper.merge(badMerge, MapperMergeContext.root(false, false, Long.MAX_VALUE))
             );
             String expectedError = """
                 Mapper for [field] conflicts with existing mapper:
    @@ -358,7 +358,7 @@ public void testMerging() {
             // TODO: should we have to include 'fixed' here? Or should updates take as 'defaults' the existing values?
             TestMapper goodMerge = fromMapping("""
                 {"type":"test_mapper","fixed":false,"variable":"updated","required":"value"}""");
    -        TestMapper merged = (TestMapper) mapper.merge(goodMerge, MapperBuilderContext.root(false, false));
    +        TestMapper merged = (TestMapper) mapper.merge(goodMerge, MapperMergeContext.root(false, false, Long.MAX_VALUE));
     
             assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper)); // original mapping is unaffected
             assertEquals("""
    @@ -376,7 +376,7 @@ public void testMultifields() throws IOException {
             String addSubField = """
                 {"type":"test_mapper","variable":"foo","required":"value","fields":{"sub2":{"type":"keyword"}}}""";
             TestMapper toMerge = fromMapping(addSubField);
    -        TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperBuilderContext.root(false, false));
    +        TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperMergeContext.root(false, false, Long.MAX_VALUE));
             assertEquals(XContentHelper.stripWhitespace("""
                 {
                   "field": {
    @@ -399,7 +399,7 @@ public void testMultifields() throws IOException {
             TestMapper badToMerge = fromMapping(badSubField);
             IllegalArgumentException e = expectThrows(
                 IllegalArgumentException.class,
    -            () -> merged.merge(badToMerge, MapperBuilderContext.root(false, false))
    +            () -> merged.merge(badToMerge, MapperMergeContext.root(false, false, Long.MAX_VALUE))
             );
             assertEquals("mapper [field.sub2] cannot be changed from type [keyword] to [binary]", e.getMessage());
         }
    @@ -415,13 +415,13 @@ public void testCopyTo() {
     
             TestMapper toMerge = fromMapping("""
                 {"type":"test_mapper","variable":"updated","required":"value","copy_to":["foo","bar"]}""");
    -        TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperBuilderContext.root(false, false));
    +        TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperMergeContext.root(false, false, Long.MAX_VALUE));
             assertEquals("""
                 {"field":{"type":"test_mapper","variable":"updated","required":"value","copy_to":["foo","bar"]}}""", Strings.toString(merged));
     
             TestMapper removeCopyTo = fromMapping("""
                 {"type":"test_mapper","variable":"updated","required":"value"}""");
    -        TestMapper noCopyTo = (TestMapper) merged.merge(removeCopyTo, MapperBuilderContext.root(false, false));
    +        TestMapper noCopyTo = (TestMapper) merged.merge(removeCopyTo, MapperMergeContext.root(false, false, Long.MAX_VALUE));
             assertEquals("""
                 {"field":{"type":"test_mapper","variable":"updated","required":"value"}}""", Strings.toString(noCopyTo));
         }
    @@ -487,7 +487,7 @@ public void testCustomSerialization() {
             TestMapper toMerge = fromMapping(conflict);
             IllegalArgumentException e = expectThrows(
                 IllegalArgumentException.class,
    -            () -> mapper.merge(toMerge, MapperBuilderContext.root(false, false))
    +            () -> mapper.merge(toMerge, MapperMergeContext.root(false, false, Long.MAX_VALUE))
             );
             assertEquals(
                 "Mapper for [field] conflicts with existing mapper:\n"
    @@ -576,7 +576,10 @@ public void testAnalyzers() {
     
             TestMapper original = mapper;
             TestMapper toMerge = fromMapping(mapping);
    -        e = expectThrows(IllegalArgumentException.class, () -> original.merge(toMerge, MapperBuilderContext.root(false, false)));
    +        e = expectThrows(
    +            IllegalArgumentException.class,
    +            () -> original.merge(toMerge, MapperMergeContext.root(false, false, Long.MAX_VALUE))
    +        );
             assertEquals(
                 "Mapper for [field] conflicts with existing mapper:\n" + "\tCannot update parameter [analyzer] from [default] to [_standard]",
                 e.getMessage()
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java
    index 5bd85a6dcdea7..b2a6651142181 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java
    @@ -10,6 +10,7 @@
     
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.xcontent.XContentHelper;
    +import org.elasticsearch.core.CheckedConsumer;
     import org.elasticsearch.index.mapper.MapperService.MergeReason;
     import org.elasticsearch.xcontent.XContentBuilder;
     import org.elasticsearch.xcontent.XContentFactory;
    @@ -19,6 +20,7 @@
     import java.util.Collections;
     
     import static org.hamcrest.Matchers.containsString;
    +import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.instanceOf;
     
     public class RootObjectMapperTests extends MapperServiceTestCase {
    @@ -160,6 +162,7 @@ public void testRuntimeSection() throws IOException {
             }));
             MapperService mapperService = createMapperService(mapping);
             assertEquals(mapping, mapperService.documentMapper().mappingSource().toString());
    +        assertEquals(3, mapperService.documentMapper().mapping().getRoot().mapperSize());
         }
     
         public void testRuntimeSectionRejectedUpdate() throws IOException {
    @@ -358,4 +361,51 @@ public void testEmptyType() throws Exception {
             assertThat(e.getMessage(), containsString("type cannot be an empty string"));
         }
     
    +    public void testWithoutMappers() throws IOException {
    +        RootObjectMapper shallowRoot = createRootObjectMapperWithAllParametersSet(b -> {}, b -> {});
    +        RootObjectMapper root = createRootObjectMapperWithAllParametersSet(b -> {
    +            b.startObject("keyword");
    +            {
    +                b.field("type", "keyword");
    +            }
    +            b.endObject();
    +        }, b -> {
    +            b.startObject("runtime");
    +            b.startObject("field").field("type", "keyword").endObject();
    +            b.endObject();
    +        });
    +        assertThat(root.withoutMappers().toString(), equalTo(shallowRoot.toString()));
    +    }
    +
    +    private RootObjectMapper createRootObjectMapperWithAllParametersSet(
    +        CheckedConsumer<XContentBuilder, IOException> buildProperties,
    +        CheckedConsumer<XContentBuilder, IOException> buildRuntimeFields
    +    ) throws IOException {
    +        DocumentMapper mapper = createDocumentMapper(topMapping(b -> {
    +            b.field("enabled", false);
    +            b.field("subobjects", false);
    +            b.field("dynamic", false);
    +            b.field("date_detection", false);
    +            b.field("numeric_detection", false);
    +            b.field("dynamic_date_formats", Collections.singletonList("yyyy-MM-dd"));
    +            b.startArray("dynamic_templates");
    +            {
    +                b.startObject();
    +                {
    +                    b.startObject("my_template");
    +                    {
    +                        b.startObject("mapping").field("type", "keyword").endObject();
    +                    }
    +                    b.endObject();
    +                }
    +                b.endObject();
    +            }
    +            b.endArray();
    +            b.startObject("properties");
    +            buildProperties.accept(b);
    +            b.endObject();
    +        }));
    +        return mapper.mapping().getRoot();
    +    }
    +
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
    index 639b66b260469..f92867d1ce461 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
    @@ -97,30 +97,6 @@ protected Object getSampleValueForDocument() {
             return "value";
         }
     
    -    @Override
    -    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152")
    -    public void testBlockLoaderFromColumnReader() throws IOException {
    -        super.testBlockLoaderFromColumnReader();
    -    }
    -
    -    @Override
    -    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152")
    -    public void testBlockLoaderFromRowStrideReader() throws IOException {
    -        super.testBlockLoaderFromRowStrideReader();
    -    }
    -
    -    @Override
    -    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152")
    -    public void testBlockLoaderFromColumnReaderWithSyntheticSource() throws IOException {
    -        super.testBlockLoaderFromColumnReaderWithSyntheticSource();
    -    }
    -
    -    @Override
    -    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152")
    -    public void testBlockLoaderFromRowStrideReaderWithSyntheticSource() throws IOException {
    -        super.testBlockLoaderFromRowStrideReaderWithSyntheticSource();
    -    }
    -
         public final void testExistsQueryIndexDisabled() throws IOException {
             MapperService mapperService = createMapperService(fieldMapping(b -> {
                 minimalMapping(b);
    @@ -1144,35 +1120,52 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed)
             assumeFalse("ignore_malformed not supported", ignoreMalformed);
             boolean storeTextField = randomBoolean();
             boolean storedKeywordField = storeTextField || randomBoolean();
    -        String nullValue = storeTextField || usually() ? null : randomAlphaOfLength(2);
    +        boolean indexText = randomBoolean();
    +        Integer ignoreAbove = randomBoolean() ? null : between(10, 100);
             KeywordFieldMapperTests.KeywordSyntheticSourceSupport keywordSupport = new KeywordFieldMapperTests.KeywordSyntheticSourceSupport(
    -            randomBoolean() ? null : between(10, 100),
    +            ignoreAbove,
                 storedKeywordField,
    -            nullValue,
    +            null,
                 false == storeTextField
             );
             return new SyntheticSourceSupport() {
                 @Override
                 public SyntheticSourceExample example(int maxValues) {
    -                SyntheticSourceExample delegate = keywordSupport.example(maxValues);
                     if (storeTextField) {
    +                    SyntheticSourceExample delegate = keywordSupport.example(maxValues, true);
                         return new SyntheticSourceExample(
                             delegate.inputValue(),
    -                        delegate.result(),
    -                        delegate.result(),
    -                        b -> b.field("type", "text").field("store", true)
    +                        delegate.expectedForSyntheticSource(),
    +                        delegate.expectedForBlockLoader(),
    +                        b -> {
    +                            b.field("type", "text").field("store", true);
    +                            if (indexText == false) {
    +                                b.field("index", false);
    +                            }
    +                        }
                         );
                     }
    -                return new SyntheticSourceExample(delegate.inputValue(), delegate.result(), delegate.blockLoaderResult(), b -> {
    -                    b.field("type", "text");
    -                    b.startObject("fields");
    -                    {
    -                        b.startObject(randomAlphaOfLength(4));
    -                        delegate.mapping().accept(b);
    +                // We'll load from _source if ignore_above is defined, otherwise we load from the keyword field.
    +                boolean loadingFromSource = ignoreAbove != null;
    +                SyntheticSourceExample delegate = keywordSupport.example(maxValues, loadingFromSource);
    +                return new SyntheticSourceExample(
    +                    delegate.inputValue(),
    +                    delegate.expectedForSyntheticSource(),
    +                    delegate.expectedForBlockLoader(),
    +                    b -> {
    +                        b.field("type", "text");
    +                        if (indexText == false) {
    +                            b.field("index", false);
    +                        }
    +                        b.startObject("fields");
    +                        {
    +                            b.startObject(randomAlphaOfLength(4));
    +                            delegate.mapping().accept(b);
    +                            b.endObject();
    +                        }
                             b.endObject();
                         }
    -                    b.endObject();
    -                });
    +                );
                 }
     
                 @Override
    @@ -1210,8 +1203,8 @@ public List invalidExample() throws IOException {
         }
     
         @Override
    -    protected Function<Object, Object> loadBlockExpected(MapperService mapper, String fieldName) {
    -        if (nullLoaderExpected(mapper, fieldName)) {
    +    protected Function<Object, Object> loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) {
    +        if (nullLoaderExpected(blockReaderSupport.mapper(), blockReaderSupport.loaderFieldName())) {
                 return null;
             }
             return v -> ((BytesRef) v).utf8ToString();
    @@ -1367,24 +1360,29 @@ public void testEmpty() throws Exception {
         }
     
         @Override
    -    protected boolean supportsColumnAtATimeReader(MapperService mapper, MappedFieldType ft) {
    +    protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) {
    +        MappedFieldType ft = mapper.fieldType(loaderFieldName);
             String parentName = mapper.mappingLookup().parentField(ft.name());
             if (parentName == null) {
                 TextFieldMapper.TextFieldType text = (TextFieldType) ft;
    -            return text.syntheticSourceDelegate() != null && text.syntheticSourceDelegate().hasDocValues();
    +            boolean supportsColumnAtATimeReader = text.syntheticSourceDelegate() != null
    +                && text.syntheticSourceDelegate().hasDocValues()
    +                && text.canUseSyntheticSourceDelegateForQuerying();
    +            return new BlockReaderSupport(supportsColumnAtATimeReader, mapper, loaderFieldName);
             }
             MappedFieldType parent = mapper.fieldType(parentName);
             if (false == parent.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) {
                 throw new UnsupportedOperationException();
             }
             KeywordFieldMapper.KeywordFieldType kwd = (KeywordFieldMapper.KeywordFieldType) parent;
    -        return kwd.hasDocValues();
    +        return new BlockReaderSupport(kwd.hasDocValues(), mapper, loaderFieldName);
         }
     
         public void testBlockLoaderFromParentColumnReader() throws IOException {
             testBlockLoaderFromParent(true, randomBoolean());
         }
     
    +    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104158")
         public void testBlockLoaderParentFromRowStrideReader() throws IOException {
             testBlockLoaderFromParent(false, randomBoolean());
         }
    @@ -1411,6 +1409,7 @@ private void testBlockLoaderFromParent(boolean columnReader, boolean syntheticSo
                 b.endObject();
             };
             MapperService mapper = createMapperService(syntheticSource ? syntheticSourceMapping(buildFields) : mapping(buildFields));
    -        testBlockLoader(columnReader, example, mapper, "field.sub");
    +        BlockReaderSupport blockReaderSupport = getSupportedReaders(mapper, "field.sub");
    +        testBlockLoader(columnReader, example, blockReaderSupport);
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java
    index e43fa379054bf..c3d2d6a3f194b 100644
    --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java
    @@ -8,6 +8,11 @@
     
     package org.elasticsearch.index.mapper.vectors;
     
    +import org.apache.lucene.queries.function.FunctionQuery;
    +import org.apache.lucene.queries.function.valuesource.ByteVectorSimilarityFunction;
    +import org.apache.lucene.queries.function.valuesource.FloatVectorSimilarityFunction;
    +import org.apache.lucene.search.BooleanClause;
    +import org.apache.lucene.search.BooleanQuery;
     import org.apache.lucene.search.KnnByteVectorQuery;
     import org.apache.lucene.search.KnnFloatVectorQuery;
     import org.apache.lucene.search.Query;
    @@ -158,6 +163,64 @@ public void testCreateNestedKnnQuery() {
             }
         }
     
    +    public void testExactKnnQuery() {
    +        int dims = randomIntBetween(2, 2048);
    +        {
    +            DenseVectorFieldType field = new DenseVectorFieldType(
    +                "f",
    +                IndexVersion.current(),
    +                DenseVectorFieldMapper.ElementType.FLOAT,
    +                dims,
    +                true,
    +                VectorSimilarity.COSINE,
    +                Collections.emptyMap()
    +            );
    +            float[] queryVector = new float[dims];
    +            for (int i = 0; i < dims; i++) {
    +                queryVector[i] = randomFloat();
    +            }
    +            Query query = field.createExactKnnQuery(queryVector);
    +            assertTrue(query instanceof BooleanQuery);
    +            BooleanQuery booleanQuery = (BooleanQuery) query;
    +            boolean foundFunction = false;
    +            for (BooleanClause clause : booleanQuery) {
    +                if (clause.getQuery() instanceof FunctionQuery functionQuery) {
    +                    foundFunction = true;
    +                    assertTrue(functionQuery.getValueSource() instanceof FloatVectorSimilarityFunction);
    +                }
    +            }
    +            assertTrue("Unable to find FloatVectorSimilarityFunction in created BooleanQuery", foundFunction);
    +        }
    +        {
    +            DenseVectorFieldType field = new DenseVectorFieldType(
    +                "f",
    +                IndexVersion.current(),
    +                DenseVectorFieldMapper.ElementType.BYTE,
    +                dims,
    +                true,
    +                VectorSimilarity.COSINE,
    +                Collections.emptyMap()
    +            );
    +            byte[] queryVector = new byte[dims];
    +            float[] floatQueryVector = new float[dims];
    +            for (int i = 0; i < dims; i++) {
    +                queryVector[i] = randomByte();
    +                floatQueryVector[i] = queryVector[i];
    +            }
    +            Query query = field.createExactKnnQuery(floatQueryVector);
    +            assertTrue(query instanceof BooleanQuery);
    +            BooleanQuery booleanQuery = (BooleanQuery) query;
    +            boolean foundFunction = false;
    +            for (BooleanClause clause : booleanQuery) {
    +                if (clause.getQuery() instanceof FunctionQuery functionQuery) {
    +                    foundFunction = true;
    +                    assertTrue(functionQuery.getValueSource() instanceof ByteVectorSimilarityFunction);
    +                }
    +            }
    +            assertTrue("Unable to find ByteVectorSimilarityFunction in created BooleanQuery", foundFunction);
    +        }
    +    }
    +
         public void testFloatCreateKnnQuery() {
             DenseVectorFieldType unindexedField = new DenseVectorFieldType(
                 "f",
    diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
    index 516f65111afca..137e0cb348a9c 100644
    --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
    @@ -28,8 +28,12 @@
     import org.elasticsearch.search.internal.SearchContext;
     import org.elasticsearch.search.sort.FieldSortBuilder;
     import org.elasticsearch.search.sort.SortOrder;
    +import org.elasticsearch.search.vectors.ExactKnnQueryBuilder;
    +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder;
     import org.elasticsearch.test.AbstractQueryTestCase;
     import org.elasticsearch.test.TransportVersionUtils;
    +import org.elasticsearch.xcontent.XContentBuilder;
    +import org.elasticsearch.xcontent.XContentFactory;
     import org.hamcrest.Matchers;
     
     import java.io.IOException;
    @@ -48,6 +52,9 @@
     
     public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBuilder> {
     
    +    private static final String VECTOR_FIELD = "vector";
    +    private static final int VECTOR_DIMENSION = 3;
    +
         @Override
         protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
             mapperService.merge(
    @@ -76,6 +83,27 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 ),
                 MapperService.MergeReason.MAPPING_UPDATE
             );
    +        XContentBuilder builder = XContentFactory.jsonBuilder()
    +            .startObject()
    +            .startObject("properties")
    +            .startObject("nested1")
    +            .field("type", "nested")
    +            .startObject("properties")
    +            .startObject(VECTOR_FIELD)
    +            .field("type", "dense_vector")
    +            .field("dims", VECTOR_DIMENSION)
    +            .field("index", true)
    +            .field("similarity", "cosine")
    +            .endObject()
    +            .endObject()
    +            .endObject()
    +            .endObject()
    +            .endObject();
    +        mapperService.merge(
    +            MapperService.SINGLE_MAPPING_NAME,
    +            new CompressedXContent(Strings.toString(builder)),
    +            MapperService.MergeReason.MAPPING_UPDATE
    +        );
         }
     
         /**
    @@ -233,6 +261,27 @@ public void testMustRewrite() throws IOException {
             assertEquals("Rewrite first", e.getMessage());
         }
     
    +    public void testKnnRewriteForInnerHits() throws IOException {
    +        SearchExecutionContext context = createSearchExecutionContext();
    +        context.setAllowUnmappedFields(true);
    +        KnnVectorQueryBuilder innerQueryBuilder = new KnnVectorQueryBuilder(
    +            "nested1." + VECTOR_FIELD,
    +            new float[] { 1.0f, 2.0f, 3.0f },
    +            1,
    +            null
    +        );
    +        NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder(
    +            "nested1",
    +            innerQueryBuilder,
    +            RandomPicks.randomFrom(random(), ScoreMode.values())
    +        );
    +        InnerHitsRewriteContext rewriteContext = new InnerHitsRewriteContext(context.getParserConfig(), context::nowInMillis);
    +        QueryBuilder queryBuilder = Rewriteable.rewrite(nestedQueryBuilder, rewriteContext, true);
    +        assertTrue(queryBuilder instanceof NestedQueryBuilder);
    +        NestedQueryBuilder rewritten = (NestedQueryBuilder) queryBuilder;
    +        assertTrue(rewritten.query() instanceof ExactKnnQueryBuilder);
    +    }
    +
         public void testIgnoreUnmapped() throws IOException {
             final NestedQueryBuilder queryBuilder = new NestedQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None);
             queryBuilder.ignoreUnmapped(true);
    diff --git a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java
    index ef32360722474..f8162eb987226 100644
    --- a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java
    @@ -9,6 +9,7 @@
     package org.elasticsearch.index.reindex;
     
     import org.elasticsearch.ElasticsearchException;
    +import org.elasticsearch.ElasticsearchParseException;
     import org.elasticsearch.ResourceNotFoundException;
     import org.elasticsearch.TransportVersion;
     import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
    @@ -16,7 +17,10 @@
     import org.elasticsearch.common.io.stream.BytesStreamOutput;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.index.reindex.BulkByScrollTask.Status;
    +import org.elasticsearch.rest.RestStatus;
     import org.elasticsearch.test.AbstractXContentTestCase;
    +import org.elasticsearch.xcontent.ObjectParser;
    +import org.elasticsearch.xcontent.ParseField;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentParser;
     
    @@ -28,10 +32,77 @@
     import static java.util.Collections.emptyList;
     import static java.util.Collections.singletonList;
     import static org.apache.lucene.tests.util.TestUtil.randomSimpleString;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
     import static org.elasticsearch.core.TimeValue.timeValueMillis;
     
     public class BulkByScrollResponseTests extends AbstractXContentTestCase {
     
    +    private static final ObjectParser<BulkByScrollResponseBuilder, Void> PARSER = new ObjectParser<>(
    +        "bulk_by_scroll_response",
    +        true,
    +        BulkByScrollResponseBuilder::new
    +    );
    +    static {
    +        PARSER.declareLong(BulkByScrollResponseBuilder::setTook, new ParseField(BulkByScrollResponse.TOOK_FIELD));
    +        PARSER.declareBoolean(BulkByScrollResponseBuilder::setTimedOut, new ParseField(BulkByScrollResponse.TIMED_OUT_FIELD));
    +        PARSER.declareObjectArray(
    +            BulkByScrollResponseBuilder::setFailures,
    +            (p, c) -> parseFailure(p),
    +            new ParseField(BulkByScrollResponse.FAILURES_FIELD)
    +        );
    +        // the fields of BulkByScrollResponse.Status are mixed into the top-level response body, so we parse them here as well
    +        Status.declareFields(PARSER);
    +    }
    +
    +    private static Object parseFailure(XContentParser parser) throws IOException {
    +        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
    +        XContentParser.Token token;
    +        String index = null;
    +        String id = null;
    +        Integer status = null;
    +        Integer shardId = null;
    +        String nodeId = null;
    +        ElasticsearchException bulkExc = null;
    +        ElasticsearchException searchExc = null;
    +        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    +            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    +            String name = parser.currentName();
    +            token = parser.nextToken();
    +            if (token == XContentParser.Token.START_ARRAY) {
    +                parser.skipChildren();
    +            } else if (token == XContentParser.Token.START_OBJECT) {
    +                switch (name) {
    +                    case ScrollableHitSource.SearchFailure.REASON_FIELD -> searchExc = ElasticsearchException.fromXContent(parser);
    +                    case Failure.CAUSE_FIELD -> bulkExc = ElasticsearchException.fromXContent(parser);
    +                    default -> parser.skipChildren();
    +                }
    +            } else if (token == XContentParser.Token.VALUE_STRING) {
    +                switch (name) {
    +                    // This field is the same as SearchFailure.index
    +                    case Failure.INDEX_FIELD -> index = parser.text();
    +                    case Failure.ID_FIELD -> id = parser.text();
    +                    case ScrollableHitSource.SearchFailure.NODE_FIELD -> nodeId = parser.text();
    +                }
    +            } else if (token == XContentParser.Token.VALUE_NUMBER) {
    +                switch (name) {
    +                    case Failure.STATUS_FIELD -> status = parser.intValue();
    +                    case ScrollableHitSource.SearchFailure.SHARD_FIELD -> shardId = parser.intValue();
    +                }
    +            }
    +        }
    +        if (bulkExc != null) {
    +            return new Failure(index, id, bulkExc, RestStatus.fromCode(status));
    +        } else if (searchExc != null) {
    +            if (status == null) {
    +                return new ScrollableHitSource.SearchFailure(searchExc, index, shardId, nodeId);
    +            } else {
    +                return new ScrollableHitSource.SearchFailure(searchExc, index, shardId, nodeId, RestStatus.fromCode(status));
    +            }
    +        } else {
    +            throw new ElasticsearchParseException("failed to parse failures array. At least one of {reason,cause} must be present");
    +        }
    +    }
    +
         private boolean includeUpdated;
         private boolean includeCreated;
         private boolean testExceptions = randomBoolean();
    @@ -160,7 +231,7 @@ protected BulkByScrollResponse createTestInstance() {
     
         @Override
         protected BulkByScrollResponse doParseInstance(XContentParser parser) throws IOException {
    -        return BulkByScrollResponse.fromXContent(parser);
    +        return PARSER.apply(parser, null).buildResponse();
         }
     
         @Override
    diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java
    index f70d40dcccd2b..0a6e31f44344c 100644
    --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java
    @@ -21,7 +21,6 @@
     import org.elasticsearch.xcontent.json.JsonXContent;
     
     import java.io.IOException;
    -import java.util.HashMap;
     import java.util.function.DoubleSupplier;
     
     import static org.elasticsearch.geometry.utils.Geohash.stringEncode;
    @@ -124,40 +123,12 @@ public void testInvalidPointEmbeddedObject() throws IOException {
             try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) {
                 parser.nextToken();
                 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
    -            assertThat(e.getMessage(), is("field [location] not supported - must be one of: lon, lat, z, type, coordinates, geohash"));
    +            assertThat(e.getMessage(), is("field [location] not supported - must be one of: lon, lat, z, type, coordinates"));
             }
             try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) {
                 parser2.nextToken();
                 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean()));
    -            assertThat(e.getMessage(), is("field [location] not supported - must be one of: lon, lat, z, type, coordinates, geohash"));
    -        }
    -    }
    -
    -    public void testInvalidPointHashMix() throws IOException {
    -        HashMap<String, Object> otherFields = new HashMap<>();
    -        otherFields.put("lat", 0);
    -        otherFields.put("lon", 0);
    -        otherFields.put("type", "Point");
    -        otherFields.put("coordinates", new double[] { 0.0, 0.0 });
    -        for (String other : otherFields.keySet()) {
    -            XContentBuilder content = JsonXContent.contentBuilder();
    -            content.startObject();
    -            content.field(other, otherFields.get(other)).field("geohash", stringEncode(0d, 0d));
    -            content.endObject();
    -
    -            try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) {
    -                parser.nextToken();
    -                Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
    -                assertThat(e.getMessage(), is("field must be either lat/lon, geohash string or type/coordinates"));
    -            }
    -            try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) {
    -                parser2.nextToken();
    -                Exception e = expectThrows(
    -                    ElasticsearchParseException.class,
    -                    () -> GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())
    -                );
    -                assertThat(e.getMessage(), is("field must be either lat/lon, geohash string or type/coordinates"));
    -            }
    +            assertThat(e.getMessage(), is("field [location] not supported - must be one of: lon, lat, z, type, coordinates"));
             }
         }
     
    @@ -170,27 +141,13 @@ public void testInvalidField() throws IOException {
             try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) {
                 parser.nextToken();
                 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
    -            assertThat(e.getMessage(), is("field [test] not supported - must be one of: lon, lat, z, type, coordinates, geohash"));
    +            assertThat(e.getMessage(), is("field [test] not supported - must be one of: lon, lat, z, type, coordinates"));
             }
     
             try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) {
                 parser2.nextToken();
                 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean()));
    -            assertThat(e.getMessage(), is("field [test] not supported - must be one of: lon, lat, z, type, coordinates, geohash"));
    -        }
    -    }
    -
    -    public void testInvalidGeoHash() throws IOException {
    -        XContentBuilder content = JsonXContent.contentBuilder();
    -        content.startObject();
    -        content.field("geohash", "!!!!");
    -        content.endObject();
    -
    -        try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) {
    -            parser.nextToken();
    -
    -            Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
    -            assertThat(e.getMessage(), is("unsupported symbol [!] in geohash [!!!!]"));
    +            assertThat(e.getMessage(), is("field [test] not supported - must be one of: lon, lat, z, type, coordinates"));
             }
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java
    index ba9e6b10150bc..09f0cf3ea8f26 100644
    --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java
    @@ -11,7 +11,6 @@
     import org.elasticsearch.ElasticsearchParseException;
     import org.elasticsearch.common.geo.GeoPoint;
     import org.elasticsearch.common.geo.GeoUtils;
    -import org.elasticsearch.geometry.utils.Geohash;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.xcontent.XContentBuilder;
     import org.elasticsearch.xcontent.XContentParser;
    @@ -20,11 +19,9 @@
     import java.io.IOException;
     
     import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
    -import static org.hamcrest.Matchers.allOf;
     import static org.hamcrest.Matchers.closeTo;
     import static org.hamcrest.Matchers.containsString;
     import static org.hamcrest.Matchers.equalTo;
    -import static org.hamcrest.Matchers.greaterThanOrEqualTo;
     import static org.hamcrest.Matchers.is;
     import static org.hamcrest.Matchers.lessThanOrEqualTo;
     import static org.hamcrest.Matchers.not;
    @@ -503,45 +500,6 @@ public void testParseGeoPointArrayZValueError() throws IOException {
             }
         }
     
    -    public void testParseGeoPointGeohash() throws IOException {
    -        for (int i = 0; i < 100; i++) {
    -            int geoHashLength = randomIntBetween(1, Geohash.PRECISION);
    -            StringBuilder geohashBuilder = new StringBuilder(geoHashLength);
    -            for (int j = 0; j < geoHashLength; j++) {
    -                geohashBuilder.append(BASE_32[randomInt(BASE_32.length - 1)]);
    -            }
    -            XContentBuilder json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject();
    -            try (XContentParser parser = createParser(json)) {
    -                parser.nextToken();
    -                GeoPoint point = GeoUtils.parseGeoPoint(parser);
    -                assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0)));
    -                assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0)));
    -                assertThat(parser.currentToken(), is(Token.END_OBJECT));
    -                assertNull(parser.nextToken());
    -            }
    -            json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject();
    -            try (XContentParser parser = createParser(json)) {
    -                while (parser.currentToken() != Token.VALUE_STRING) {
    -                    parser.nextToken();
    -                }
    -                GeoPoint point = GeoUtils.parseGeoPoint(parser);
    -                assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0)));
    -                assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0)));
    -            }
    -        }
    -    }
    -
    -    public void testParseGeoPointGeohashWrongType() throws IOException {
    -        XContentBuilder json = jsonBuilder().startObject().field("geohash", 1.0).endObject();
    -        try (XContentParser parser = createParser(json)) {
    -            parser.nextToken();
    -            Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
    -            assertThat(e.getMessage(), containsString("[geohash] must be a string"));
    -            assertThat(parser.currentToken(), is(Token.END_OBJECT));
    -            assertNull(parser.nextToken());
    -        }
    -    }
    -
         public void testParseGeoPointLatNoLon() throws IOException {
             double lat = 0.0;
             XContentBuilder json = jsonBuilder().startObject().field("lat", lat).endObject();
    @@ -691,19 +649,7 @@ public void testParseGeoPointExtraField() throws IOException {
             try (XContentParser parser = createParser(json)) {
                 parser.nextToken();
                 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
    -            assertThat(e.getMessage(), is("field [foo] not supported - must be one of: lon, lat, z, type, coordinates, geohash"));
    -        }
    -    }
    -
    -    public void testParseGeoPointLonLatGeoHash() throws IOException {
    -        double lat = 0.0;
    -        double lon = 0.0;
    -        String geohash = "abcd";
    -        XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("geohash", geohash).endObject();
    -        try (XContentParser parser = createParser(json)) {
    -            parser.nextToken();
    -            Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
    -            assertThat(e.getMessage(), containsString("fields matching more than one point format found"));
    +            assertThat(e.getMessage(), is("field [foo] not supported - must be one of: lon, lat, z, type, coordinates"));
             }
         }
     
    @@ -772,7 +718,9 @@ public void testParseGeoPointGeohashPositions() throws IOException {
         }
     
         private GeoPoint parseGeohash(String geohash, GeoUtils.EffectivePoint effectivePoint) throws IOException {
    -        try (XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject())) {
    +        try (XContentParser parser = createParser(jsonBuilder().startObject().field("location", geohash).endObject())) {
    +            parser.nextToken();
    +            parser.nextToken();
                 parser.nextToken();
                 return GeoUtils.parseGeoPoint(parser, randomBoolean(), effectivePoint);
             }
    diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java
    index b78aa89a8fcec..6c992510e7dbd 100644
    --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java
    +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java
    @@ -12,7 +12,6 @@
     import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
     import org.elasticsearch.cluster.routing.ShardRouting;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.index.engine.SafeCommitInfo;
     import org.elasticsearch.index.shard.ShardId;
    @@ -24,6 +23,7 @@
     import java.util.function.LongSupplier;
     import java.util.function.Supplier;
     
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
     
     public abstract class ReplicationTrackerTestCase extends ESTestCase {
    @@ -54,14 +54,9 @@ static String nodeIdFromAllocationId(final AllocationId allocationId) {
     
         static IndexShardRoutingTable routingTable(final Set initializingIds, final AllocationId primaryId) {
             final ShardId shardId = new ShardId("test", "_na_", 0);
    -        final ShardRouting primaryShard = TestShardRouting.newShardRouting(
    -            shardId,
    -            nodeIdFromAllocationId(primaryId),
    -            null,
    -            true,
    -            ShardRoutingState.STARTED,
    -            primaryId
    -        );
    +        final ShardRouting primaryShard = shardRoutingBuilder(shardId, nodeIdFromAllocationId(primaryId), true, ShardRoutingState.STARTED)
    +            .withAllocationId(primaryId)
    +            .build();
             return routingTable(initializingIds, primaryShard);
         }
     
    @@ -71,14 +66,9 @@ static IndexShardRoutingTable routingTable(final Set initializingI
             final IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(shardId);
             for (final AllocationId initializingId : initializingIds) {
                 builder.addShard(
    -                TestShardRouting.newShardRouting(
    -                    shardId,
    -                    nodeIdFromAllocationId(initializingId),
    -                    null,
    -                    false,
    -                    ShardRoutingState.INITIALIZING,
    -                    initializingId
    -                )
    +                shardRoutingBuilder(shardId, nodeIdFromAllocationId(initializingId), false, ShardRoutingState.INITIALIZING)
    +                    .withAllocationId(initializingId)
    +                    .build()
                 );
             }
     
    diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java
    index ba661b97ce35c..bab15079f4522 100644
    --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java
    @@ -14,7 +14,6 @@
     import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
     import org.elasticsearch.cluster.routing.ShardRouting;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.common.Randomness;
     import org.elasticsearch.common.io.stream.BytesStreamOutput;
     import org.elasticsearch.common.io.stream.StreamInput;
    @@ -48,6 +47,7 @@
     import java.util.stream.Stream;
     
     import static java.util.Collections.emptySet;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
     import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
     import static org.hamcrest.Matchers.equalTo;
    @@ -931,14 +931,14 @@ private static FakeClusterState initialState() {
             activeAllocationIds.remove(primaryId);
             activeAllocationIds.add(relocatingId);
             final ShardId shardId = new ShardId("test", "_na_", 0);
    -        final ShardRouting primaryShard = TestShardRouting.newShardRouting(
    +        final ShardRouting primaryShard = shardRoutingBuilder(
                 shardId,
                 nodeIdFromAllocationId(relocatingId),
    -            nodeIdFromAllocationId(AllocationId.newInitializing(relocatingId.getRelocationId())),
                 true,
    -            ShardRoutingState.RELOCATING,
    -            relocatingId
    -        );
    +            ShardRoutingState.RELOCATING
    +        ).withRelocatingNodeId(nodeIdFromAllocationId(AllocationId.newInitializing(relocatingId.getRelocationId())))
    +            .withAllocationId(relocatingId)
    +            .build();
     
             return new FakeClusterState(initialClusterStateVersion, activeAllocationIds, routingTable(initializingAllocationIds, primaryShard));
         }
    diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java
    index aa3be82801008..668a47645a17f 100644
    --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java
    @@ -12,6 +12,7 @@
     import org.elasticsearch.action.support.ActionTestUtils;
     import org.elasticsearch.action.support.PlainActionFuture;
     import org.elasticsearch.common.settings.Settings;
    +import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
     import org.elasticsearch.common.util.concurrent.EsExecutors;
     import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
     import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor;
    @@ -29,11 +30,9 @@
     import org.junit.BeforeClass;
     
     import java.util.ArrayList;
    -import java.util.Collections;
     import java.util.List;
     import java.util.Set;
     import java.util.concurrent.BrokenBarrierException;
    -import java.util.concurrent.ConcurrentHashMap;
     import java.util.concurrent.CountDownLatch;
     import java.util.concurrent.CyclicBarrier;
     import java.util.concurrent.ExecutionException;
    @@ -425,7 +424,7 @@ public void testAsyncBlockOperationsRace() throws Exception {
             final int operations = scaledRandomIntBetween(1, 64);
             final CyclicBarrier barrier = new CyclicBarrier(1 + 1 + operations);
             final CountDownLatch operationLatch = new CountDownLatch(1 + operations);
    -        final Set values = Collections.newSetFromMap(new ConcurrentHashMap<>());
    +        final Set values = ConcurrentCollections.newConcurrentSet();
             final List threads = new ArrayList<>();
             for (int i = 0; i < operations; i++) {
                 final int value = i;
    diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
    index e6d6de16cff2c..d7ca3a0808f78 100644
    --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
    @@ -172,7 +172,7 @@
     import java.util.stream.Stream;
     
     import static java.util.Collections.emptySet;
    -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.common.lucene.Lucene.cleanLuceneIndex;
     import static org.elasticsearch.index.IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING;
     import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
    @@ -254,7 +254,7 @@ public void testPersistenceStateMetadataPersistence() throws Exception {
                 new ShardStateMetadata(routing.primary(), shard.indexSettings().getUUID(), routing.allocationId())
             );
     
    -        routing = TestShardRouting.relocate(shard.shardRouting, "some node", 42L);
    +        routing = shard.shardRouting.relocate("some node", 42L);
             IndexShardTestCase.updateRoutingEntry(shard, routing);
             shardStateMetadata = load(logger, shardStatePath);
             assertEquals(shardStateMetadata, getShardStateMetadata(shard));
    @@ -631,14 +631,12 @@ public void testPrimaryPromotionRollsGeneration() throws Exception {
             // promote the replica
             final ShardRouting replicaRouting = indexShard.routingEntry();
             final long newPrimaryTerm = indexShard.getPendingPrimaryTerm() + between(1, 10000);
    -        final ShardRouting primaryRouting = newShardRouting(
    +        final ShardRouting primaryRouting = shardRoutingBuilder(
                 replicaRouting.shardId(),
                 replicaRouting.currentNodeId(),
    -            null,
                 true,
    -            ShardRoutingState.STARTED,
    -            replicaRouting.allocationId()
    -        );
    +            ShardRoutingState.STARTED
    +        ).withAllocationId(replicaRouting.allocationId()).build();
             indexShard.updateShardState(
                 primaryRouting,
                 newPrimaryTerm,
    @@ -681,14 +679,9 @@ public void testOperationPermitsOnPrimaryShards() throws Exception {
             if (randomBoolean()) {
                 // relocation target
                 indexShard = newShard(
    -                newShardRouting(
    -                    shardId,
    -                    "local_node",
    -                    "other node",
    -                    true,
    -                    ShardRoutingState.INITIALIZING,
    -                    AllocationId.newRelocation(AllocationId.newInitializing())
    -                )
    +                shardRoutingBuilder(shardId, "local_node", true, ShardRoutingState.INITIALIZING).withRelocatingNodeId("other node")
    +                    .withAllocationId(AllocationId.newRelocation(AllocationId.newInitializing()))
    +                    .build()
                 );
                 assertEquals(0, indexShard.getActiveOperationsCount());
                 isPrimaryMode = false;
    @@ -696,14 +689,12 @@ public void testOperationPermitsOnPrimaryShards() throws Exception {
                 // simulate promotion
                 indexShard = newStartedShard(false);
                 ShardRouting replicaRouting = indexShard.routingEntry();
    -            ShardRouting primaryRouting = newShardRouting(
    +            ShardRouting primaryRouting = shardRoutingBuilder(
                     replicaRouting.shardId(),
                     replicaRouting.currentNodeId(),
    -                null,
                     true,
    -                ShardRoutingState.STARTED,
    -                replicaRouting.allocationId()
    -            );
    +                ShardRoutingState.STARTED
    +            ).withAllocationId(replicaRouting.allocationId()).build();
                 final long newPrimaryTerm = indexShard.getPendingPrimaryTerm() + between(1, 1000);
                 CountDownLatch latch = new CountDownLatch(1);
                 indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> {
    @@ -926,14 +917,16 @@ public void testOperationPermitOnReplicaShards() throws Exception {
                 case 1 -> {
                     // initializing replica / primary
                     final boolean relocating = randomBoolean();
    -                ShardRouting routing = newShardRouting(
    +                ShardRouting routing = shardRoutingBuilder(
                         shardId,
                         "local_node",
    -                    relocating ? "sourceNode" : null,
                         relocating ? randomBoolean() : false,
    -                    ShardRoutingState.INITIALIZING,
    -                    relocating ? AllocationId.newRelocation(AllocationId.newInitializing()) : AllocationId.newInitializing()
    -                );
    +                    ShardRoutingState.INITIALIZING
    +                ).withRelocatingNodeId(relocating ? "sourceNode" : null)
    +                    .withAllocationId(
    +                        relocating ? AllocationId.newRelocation(AllocationId.newInitializing()) : AllocationId.newInitializing()
    +                    )
    +                    .build();
                     indexShard = newShard(routing);
                     engineClosed = true;
                 }
    @@ -941,14 +934,12 @@ public void testOperationPermitOnReplicaShards() throws Exception {
                     // relocation source
                     indexShard = newStartedShard(true);
                     ShardRouting routing = indexShard.routingEntry();
    -                final ShardRouting newRouting = newShardRouting(
    +                final ShardRouting newRouting = shardRoutingBuilder(
                         routing.shardId(),
                         routing.currentNodeId(),
    -                    "otherNode",
                         true,
    -                    ShardRoutingState.RELOCATING,
    -                    AllocationId.newRelocation(routing.allocationId())
    -                );
    +                    ShardRoutingState.RELOCATING
    +                ).withRelocatingNodeId("otherNode").withAllocationId(AllocationId.newRelocation(routing.allocationId())).build();
                     IndexShardTestCase.updateRoutingEntry(indexShard, newRouting);
                     blockingCallRelocated(indexShard, newRouting, (primaryContext, listener) -> listener.onResponse(null));
                     engineClosed = false;
    @@ -1166,13 +1157,9 @@ public void testAcquireReplicaPermitAdvanceMaxSeqNoOfUpdates() throws Exception
         public void testGlobalCheckpointSync() throws IOException {
             // create the primary shard with a callback that sets a boolean when the global checkpoint sync is invoked
             final ShardId shardId = new ShardId("index", "_na_", 0);
    -        final ShardRouting shardRouting = TestShardRouting.newShardRouting(
    -            shardId,
    -            randomAlphaOfLength(8),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.EmptyStoreRecoverySource.INSTANCE
    -        );
    +        final ShardRouting shardRouting = shardRoutingBuilder(shardId, randomAlphaOfLength(8), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.EmptyStoreRecoverySource.INSTANCE)
    +            .build();
             final Settings settings = indexSettings(IndexVersion.current(), 1, 2).build();
             final IndexMetadata.Builder indexMetadata = IndexMetadata.builder(shardRouting.getIndexName()).settings(settings).primaryTerm(0, 1);
             final AtomicBoolean synced = new AtomicBoolean();
    @@ -1237,13 +1224,9 @@ public void testClosedIndicesSkipSyncGlobalCheckpoint() throws Exception {
                 .settings(indexSettings(IndexVersion.current(), 1, 2))
                 .state(IndexMetadata.State.CLOSE)
                 .primaryTerm(0, 1);
    -        ShardRouting shardRouting = TestShardRouting.newShardRouting(
    -            shardId,
    -            randomAlphaOfLength(8),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.EmptyStoreRecoverySource.INSTANCE
    -        );
    +        ShardRouting shardRouting = shardRoutingBuilder(shardId, randomAlphaOfLength(8), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.EmptyStoreRecoverySource.INSTANCE)
    +            .build();
             AtomicBoolean synced = new AtomicBoolean();
             IndexShard primaryShard = newShard(
                 shardRouting,
    @@ -1537,13 +1520,9 @@ public void run() {
     
         public void testAsyncPersistGlobalCheckpointSync() throws InterruptedException, IOException {
             final ShardId shardId = new ShardId("index", "_na_", 0);
    -        final ShardRouting shardRouting = TestShardRouting.newShardRouting(
    -            shardId,
    -            randomAlphaOfLength(8),
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.EmptyStoreRecoverySource.INSTANCE
    -        );
    +        final ShardRouting shardRouting = shardRoutingBuilder(shardId, randomAlphaOfLength(8), true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.EmptyStoreRecoverySource.INSTANCE)
    +            .build();
             final Settings settings = indexSettings(IndexVersion.current(), 1, 2).build();
             final IndexMetadata.Builder indexMetadata = IndexMetadata.builder(shardRouting.getIndexName()).settings(settings).primaryTerm(0, 1);
             IndexShard shard = newShard(
    @@ -1642,13 +1621,9 @@ public void testShardStats() throws IOException {
         public void testShardStatsWithFailures() throws IOException {
             allowShardFailures();
             final ShardId shardId = new ShardId("index", "_na_", 0);
    -        final ShardRouting shardRouting = newShardRouting(
    -            shardId,
    -            "node",
    -            true,
    -            ShardRoutingState.INITIALIZING,
    +        final ShardRouting shardRouting = shardRoutingBuilder(shardId, "node", true, ShardRoutingState.INITIALIZING).withRecoverySource(
                 RecoverySource.EmptyStoreRecoverySource.INSTANCE
    -        );
    +        ).build();
             final NodeEnvironment.DataPath dataPath = new NodeEnvironment.DataPath(createTempDir());
     
             ShardPath shardPath = new ShardPath(false, dataPath.resolve(shardId), dataPath.resolve(shardId), shardId);
    @@ -2250,13 +2225,9 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
             final ShardRouting replicaRouting = shard.routingEntry();
             IndexShard newShard = reinitShard(
                 shard,
    -            newShardRouting(
    -                replicaRouting.shardId(),
    -                replicaRouting.currentNodeId(),
    -                true,
    -                ShardRoutingState.INITIALIZING,
    -                RecoverySource.ExistingStoreRecoverySource.INSTANCE
    -            )
    +            shardRoutingBuilder(replicaRouting.shardId(), replicaRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING)
    +                .withRecoverySource(RecoverySource.ExistingStoreRecoverySource.INSTANCE)
    +                .build()
             );
             DiscoveryNode localNode = DiscoveryNodeUtils.builder("foo").roles(emptySet()).build();
             newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
    @@ -2321,13 +2292,9 @@ public void testRecoverFromStalePrimaryForceNewHistoryUUID() throws IOException
             String historyUUID = shard.getHistoryUUID();
             IndexShard newShard = reinitShard(
                 shard,
    -            newShardRouting(
    -                shard.shardId(),
    -                shard.shardRouting.currentNodeId(),
    -                true,
    -                ShardRoutingState.INITIALIZING,
    -                RecoverySource.ExistingStoreRecoverySource.FORCE_STALE_PRIMARY_INSTANCE
    -            )
    +            shardRoutingBuilder(shard.shardId(), shard.shardRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING)
    +                .withRecoverySource(RecoverySource.ExistingStoreRecoverySource.FORCE_STALE_PRIMARY_INSTANCE)
    +                .build()
             );
             DiscoveryNode localNode = DiscoveryNodeUtils.builder("foo").roles(emptySet()).build();
             newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
    @@ -2549,13 +2516,9 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception {
                 .build();
             closeShards(shard);
             IndexShard newShard = newShard(
    -            newShardRouting(
    -                replicaRouting.shardId(),
    -                replicaRouting.currentNodeId(),
    -                true,
    -                ShardRoutingState.INITIALIZING,
    -                RecoverySource.ExistingStoreRecoverySource.INSTANCE
    -            ),
    +            shardRoutingBuilder(replicaRouting.shardId(), replicaRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING)
    +                .withRecoverySource(RecoverySource.ExistingStoreRecoverySource.INSTANCE)
    +                .build(),
                 shard.shardPath(),
                 newShardIndexMetadata,
                 null,
    @@ -3136,13 +3099,9 @@ public void testRecoverFromLocalShard() throws IOException {
             indexDoc(sourceShard, "_doc", "1", "{\"foo\" : \"bar\"}");
             sourceShard.refresh("test");
     
    -        ShardRouting targetRouting = newShardRouting(
    -            new ShardId("index_1", "index_1", 0),
    -            "n1",
    -            true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.LocalShardsRecoverySource.INSTANCE
    -        );
    +        ShardRouting targetRouting = shardRoutingBuilder(new ShardId("index_1", "index_1", 0), "n1", true, ShardRoutingState.INITIALIZING)
    +            .withRecoverySource(RecoverySource.LocalShardsRecoverySource.INSTANCE)
    +            .build();
     
             final IndexShard targetShard;
             DiscoveryNode localNode = DiscoveryNodeUtils.builder("foo").roles(emptySet()).build();
    @@ -4639,13 +4598,12 @@ public void testDoNotTrimCommitsWhenOpenReadOnlyEngine() throws Exception {
             shard.flush(new FlushRequest());
             assertThat(shard.docStats().getCount(), equalTo(numDocs));
             final ShardRouting replicaRouting = shard.routingEntry();
    -        ShardRouting readonlyShardRouting = newShardRouting(
    +        ShardRouting readonlyShardRouting = shardRoutingBuilder(
                 replicaRouting.shardId(),
                 replicaRouting.currentNodeId(),
                 true,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.ExistingStoreRecoverySource.INSTANCE
    -        );
    +            ShardRoutingState.INITIALIZING
    +        ).withRecoverySource(RecoverySource.ExistingStoreRecoverySource.INSTANCE).build();
             final IndexShard readonlyShard = reinitShard(
                 shard,
                 readonlyShardRouting,
    @@ -4744,13 +4702,12 @@ public void testShardExposesWriteLoadStats() throws Exception {
             }
     
             final FakeClock fakeClock = new FakeClock();
    -        final ShardRouting shardRouting = newShardRouting(
    +        final ShardRouting shardRouting = shardRoutingBuilder(
                 primary.shardId(),
                 randomAlphaOfLength(10),
                 false,
    -            ShardRoutingState.INITIALIZING,
    -            RecoverySource.PeerRecoverySource.INSTANCE
    -        );
    +            ShardRoutingState.INITIALIZING
    +        ).withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE).build();
             final ShardId shardId = shardRouting.shardId();
             final NodeEnvironment.DataPath dataPath = new NodeEnvironment.DataPath(createTempDir());
             final ShardPath shardPath = new ShardPath(false, dataPath.resolve(shardId), dataPath.resolve(shardId), shardId);
    diff --git a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java
    index f1218dd6d797d..36e38d7246c16 100644
    --- a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java
    @@ -31,7 +31,6 @@
     import org.elasticsearch.cluster.routing.ShardRouting;
     import org.elasticsearch.cluster.routing.ShardRoutingHelper;
     import org.elasticsearch.cluster.routing.ShardRoutingState;
    -import org.elasticsearch.cluster.routing.TestShardRouting;
     import org.elasticsearch.common.UUIDs;
     import org.elasticsearch.common.settings.ClusterSettings;
     import org.elasticsearch.common.settings.Settings;
    @@ -66,6 +65,7 @@
     import java.util.regex.Matcher;
     import java.util.regex.Pattern;
     
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     import static org.elasticsearch.index.shard.RemoveCorruptedShardDataCommand.TRUNCATE_CLEAN_TRANSLOG_FLAG;
     import static org.hamcrest.Matchers.allOf;
     import static org.hamcrest.Matchers.containsString;
    @@ -97,13 +97,9 @@ public class RemoveCorruptedShardDataCommandTests extends IndexShardTestCase {
         public void setup() throws IOException {
             shardId = new ShardId("index0", UUIDs.randomBase64UUID(), 0);
             final String nodeId = randomAlphaOfLength(10);
    -        routing = TestShardRouting.newShardRouting(
    -            shardId,
    -            nodeId,
    -            true,
    -            ShardRoutingState.INITIALIZING,
    +        routing = shardRoutingBuilder(shardId, nodeId, true, ShardRoutingState.INITIALIZING).withRecoverySource(
                 RecoverySource.EmptyStoreRecoverySource.INSTANCE
    -        );
    +        ).build();
     
             dataPaths = new Path[] { createTempDir(), createTempDir(), createTempDir() };
             final String[] tmpPaths = Arrays.stream(dataPaths).map(s -> s.toAbsolutePath().toString()).toArray(String[]::new);
    diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java
    index 9e1be4c629b4a..20493ee576c0a 100644
    --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java
    @@ -24,6 +24,7 @@
     import org.elasticsearch.xcontent.XContentType;
     
     import java.io.IOException;
    +import java.util.Arrays;
     import java.util.function.LongSupplier;
     
     import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
    @@ -114,6 +115,20 @@ public void testGetFromTranslogWithSyntheticSource() throws IOException {
             runGetFromTranslogWithOptions(docToIndex, sourceOptions, expectedFetchedSource, "\"long\"", 7L, true);
         }
     
    +    public void testGetFromTranslogWithDenseVector() throws IOException {
    +        float[] vector = new float[2048];
    +        for (int i = 0; i < vector.length; i++) {
    +            vector[i] = randomFloat();
    +        }
    +        String docToIndex = Strings.format("""
    +            {
    +                "bar": %s,
    +                "foo": "foo"
    +            }
    +            """, Arrays.toString(vector));
    +        runGetFromTranslogWithOptions(docToIndex, "\"enabled\": true", docToIndex, "\"text\"", "foo", "\"dense_vector\"", false);
    +    }
    +
         private void runGetFromTranslogWithOptions(
             String docToIndex,
             String sourceOptions,
    @@ -122,23 +137,48 @@ private void runGetFromTranslogWithOptions(
             Object expectedFooVal,
             boolean sourceOnlyFetchCreatesInMemoryReader
         ) throws IOException {
    -        IndexMetadata metadata = IndexMetadata.builder("test").putMapping(Strings.format("""
    -            {
    -              "properties": {
    -                "foo": {
    -                  "type": %s,
    -                  "store": true
    -                },
    -                "bar": { "type": %s }
    -              },
    -              "_source": { %s }
    -              }
    -            }""", fieldType, fieldType, sourceOptions)).settings(indexSettings(IndexVersion.current(), 1, 1)).primaryTerm(0, 1).build();
    +        runGetFromTranslogWithOptions(
    +            docToIndex,
    +            sourceOptions,
    +            expectedResult,
    +            fieldType,
    +            expectedFooVal,
    +            fieldType,
    +            sourceOnlyFetchCreatesInMemoryReader
    +        );
    +    }
    +
    +    private void runGetFromTranslogWithOptions(
    +        String docToIndex,
    +        String sourceOptions,
    +        String expectedResult,
    +        String fieldTypeFoo,
    +        Object expectedFooVal,
    +        String fieldTypeBar,
    +        boolean sourceOnlyFetchCreatesInMemoryReader
    +    ) throws IOException {
    +        IndexMetadata metadata = IndexMetadata.builder("test")
    +            .putMapping(Strings.format("""
    +                {
    +                  "properties": {
    +                    "foo": {
    +                      "type": %s,
    +                      "store": true
    +                    },
    +                    "bar": { "type": %s }
    +                  },
    +                  "_source": { %s }
    +                  }
    +                }""", fieldTypeFoo, fieldTypeBar, sourceOptions))
    +            .settings(indexSettings(IndexVersion.current(), 1, 1))
    +            .primaryTerm(0, 1)
    +            .build();
             IndexShard primary = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, EngineTestCase.randomReaderWrapper());
             recoverShardFromStore(primary);
             LongSupplier translogInMemorySegmentCount = ((InternalEngine) primary.getEngine()).translogInMemorySegmentsCount::get;
             long translogInMemorySegmentCountExpected = 0;
    -        indexDoc(primary, "test", "0", docToIndex);
    +        Engine.IndexResult res = indexDoc(primary, "test", "0", docToIndex);
    +        assertTrue(res.isCreated());
             assertTrue(primary.getEngine().refreshNeeded());
             GetResult testGet = primary.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM);
             assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME));
    diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java
    index 851ad18500add..86408b3b22ed7 100644
    --- a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java
    +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java
    @@ -22,6 +22,7 @@
     import org.apache.lucene.search.Weight;
     import org.apache.lucene.store.Directory;
     import org.apache.lucene.tests.index.RandomIndexWriter;
    +import org.apache.lucene.tests.util.LuceneTestCase;
     import org.apache.lucene.util.BytesRef;
     import org.elasticsearch.cluster.metadata.IndexMetadata;
     import org.elasticsearch.cluster.routing.IndexRouting;
    @@ -46,7 +47,7 @@ public void testSplitOnID() throws IOException {
             SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
             Directory dir = newFSDirectory(createTempDir());
             final int numDocs = randomIntBetween(50, 100);
    -        RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    +        RandomIndexWriter writer = createIndexWriter(dir);
             int numShards = randomIntBetween(2, 10);
             IndexMetadata metadata = IndexMetadata.builder("test")
                 .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
    @@ -72,7 +73,7 @@ public void testSplitOnRouting() throws IOException {
             SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
             Directory dir = newFSDirectory(createTempDir());
             final int numDocs = randomIntBetween(50, 100);
    -        RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    +        RandomIndexWriter writer = createIndexWriter(dir);
             int numShards = randomIntBetween(2, 10);
             IndexMetadata metadata = IndexMetadata.builder("test")
                 .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
    @@ -97,7 +98,7 @@ public void testSplitOnIdOrRouting() throws IOException {
             SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
             Directory dir = newFSDirectory(createTempDir());
             final int numDocs = randomIntBetween(50, 100);
    -        RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    +        RandomIndexWriter writer = createIndexWriter(dir);
             int numShards = randomIntBetween(2, 10);
             IndexMetadata metadata = IndexMetadata.builder("test")
                 .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
    @@ -124,7 +125,7 @@ public void testSplitOnRoutingPartitioned() throws IOException {
             SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
             Directory dir = newFSDirectory(createTempDir());
             final int numDocs = randomIntBetween(50, 100);
    -        RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    +        RandomIndexWriter writer = createIndexWriter(dir);
             int numShards = randomIntBetween(2, 10);
             IndexMetadata metadata = IndexMetadata.builder("test")
                 .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
    @@ -236,4 +237,12 @@ private Iterable topLevel(IndexRouting indexRouting, int id, @Nu
         private int shardId(IndexRouting indexRouting, int id, @Nullable String routing) {
             return indexRouting.getShard(Integer.toString(id), routing);
         }
    +
    +    private static RandomIndexWriter createIndexWriter(Directory dir) throws IOException {
    +        return new RandomIndexWriter(
    +            random(),
    +            dir,
    +            LuceneTestCase.newIndexWriterConfig().setMergePolicy(LuceneTestCase.newMergePolicy(random(), false))
    +        );
    +    }
     }
    diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
    index 13ecc0841ba55..ca7dd2683f211 100644
    --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
    +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
    @@ -74,7 +74,6 @@
     import org.elasticsearch.cluster.service.MasterService;
     import org.elasticsearch.cluster.version.CompatibilityVersions;
     import org.elasticsearch.common.UUIDs;
    -import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.settings.ClusterSettings;
     import org.elasticsearch.common.settings.IndexScopedSettings;
     import org.elasticsearch.common.settings.Settings;
    @@ -269,14 +268,7 @@ public IndexMetadata verifyIndexMetadata(IndexMetadata indexMetadata, IndexVersi
                     actionFilters
                 )
             );
    -        client.initialize(
    -            actions,
    -            transportService.getTaskManager(),
    -            null,
    -            transportService.getLocalNodeConnection(),
    -            null,
    -            new NamedWriteableRegistry(List.of())
    -        );
    +        client.initialize(actions, transportService.getTaskManager(), null, transportService.getLocalNodeConnection(), null);
     
             ShardLimitValidator shardLimitValidator = new ShardLimitValidator(SETTINGS, clusterService);
             MetadataIndexStateService indexStateService = new MetadataIndexStateService(
    diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java
    index 0317a6baf040a..d27e924110c15 100644
    --- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java
    +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java
    @@ -211,7 +211,7 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
             IndexShard shard = newShard(false);
             shard.markAsRecovering("for testing", new RecoveryState(shard.routingEntry(), localNode, localNode));
             shard.prepareForIndexRecovery();
    -        assertThat(shard.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
    +        assertThat(recoverLocallyUpToGlobalCheckpoint(shard), equalTo(UNASSIGNED_SEQ_NO));
             assertThat(shard.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
             assertThat(shard.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
             assertThat(shard.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
    @@ -239,7 +239,7 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
             );
             replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
             replica.prepareForIndexRecovery();
    -        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(globalCheckpoint + 1));
    +        assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(globalCheckpoint + 1));
             assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(expectedTotalLocal));
             assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(expectedTotalLocal));
             assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
    @@ -254,7 +254,7 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
             replica = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.PeerRecoverySource.INSTANCE));
             replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
             replica.prepareForIndexRecovery();
    -        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
    +        assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(UNASSIGNED_SEQ_NO));
             assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
             assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
             assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
    @@ -276,10 +276,10 @@ public void testPrepareIndexForPeerRecovery() throws Exception {
             replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
             replica.prepareForIndexRecovery();
             if (safeCommit.isPresent()) {
    -            assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(safeCommit.get().localCheckpoint + 1));
    +            assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(safeCommit.get().localCheckpoint + 1));
                 assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(0));
             } else {
    -            assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
    +            assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(UNASSIGNED_SEQ_NO));
                 assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
             }
             assertThat(replica.recoveryState().getStage(), equalTo(RecoveryState.Stage.TRANSLOG));
    @@ -313,7 +313,7 @@ public void testClosedIndexSkipsLocalRecovery() throws Exception {
             );
             replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
             replica.prepareForIndexRecovery();
    -        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(safeCommit.get().localCheckpoint + 1));
    +        assertThat(recoverLocallyUpToGlobalCheckpoint(replica), equalTo(safeCommit.get().localCheckpoint + 1));
             assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(0));
             assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
             assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
    @@ -328,7 +328,7 @@ public void testResetStartingSeqNoIfLastCommitCorrupted() throws Exception {
             shard = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.PeerRecoverySource.INSTANCE));
             shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
             shard.prepareForIndexRecovery();
    -        long startingSeqNo = shard.recoverLocallyUpToGlobalCheckpoint();
    +        long startingSeqNo = recoverLocallyUpToGlobalCheckpoint(shard);
             shard.store().markStoreCorrupted(new IOException("simulated"));
             RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, 0L, null, null, null);
             StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest(logger, rNode, recoveryTarget, startingSeqNo);
    diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java
    index e2e56d33bbba0..f5ad34c7e57db 100644
    --- a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java
    +++ b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java
    @@ -75,7 +75,7 @@ public void testSerialization() throws Exception {
             assertThat(outRequest.recoveryId(), equalTo(inRequest.recoveryId()));
             assertThat(outRequest.startingSeqNo(), equalTo(inRequest.startingSeqNo()));
     
    -        if (serializationVersion.onOrAfter(TransportVersions.WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED)) {
    +        if (serializationVersion.onOrAfter(TransportVersions.V_8_11_X)) {
                 assertEquals(outRequest.clusterStateVersion(), inRequest.clusterStateVersion());
             } else {
                 assertEquals(0L, inRequest.clusterStateVersion());
    diff --git a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
    index 877f8ce4dcb96..cb57096d02744 100644
    --- a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
    +++ b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
    @@ -61,10 +61,7 @@ public List> getSettings() {
     
         public static class UpdateInternalOrPrivateAction {
     
    -        public static final ActionType<UpdateInternalOrPrivateAction.Response> INSTANCE = new ActionType<>(
    -            "indices:admin/settings/update-internal-or-private-index",
    -            UpdateInternalOrPrivateAction.Response::new
    -        );
    +        public static final ActionType<UpdateInternalOrPrivateAction.Response> INSTANCE = new ActionType<>("indices:admin/settings/update-internal-or-private-index");
     
             public static class Request extends MasterNodeRequest<Request> {
     
    diff --git a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
    index 89fbe9ab060ab..b8b687cf0fe17 100644
    --- a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
    +++ b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
    @@ -23,6 +23,7 @@
     import java.util.Set;
     
     import static java.util.Collections.emptySet;
    +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
     
     public class IndicesStoreTests extends ESTestCase {
         private static final ShardRoutingState[] NOT_STARTED_STATES;
    @@ -66,7 +67,9 @@ public void testShardCanBeDeletedNoShardStarted() {
                 String currentNodeId = state == ShardRoutingState.UNASSIGNED ? null : randomAlphaOfLength(10);
                 String relocatingNodeId = state == ShardRoutingState.RELOCATING ? randomAlphaOfLength(10) : null;
                 routingTable.addShard(
    -                TestShardRouting.newShardRouting(shardId, currentNodeId, relocatingNodeId, j == 0, state, unassignedInfo)
    +                shardRoutingBuilder(shardId, currentNodeId, j == 0, state).withRelocatingNodeId(relocatingNodeId)
    +                    .withUnassignedInfo(unassignedInfo)
    +                    .build()
                 );
             }
             assertFalse(IndicesStore.shardCanBeDeleted(localNode.getId(), routingTable.build()));
    diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java
    index 7c85cba4c34eb..ee35491a74d00 100644
    --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java
    +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java
    @@ -9,7 +9,7 @@
     package org.elasticsearch.persistent;
     
     import org.elasticsearch.action.ActionListener;
    -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
    +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
     import org.elasticsearch.client.internal.Client;
     import org.elasticsearch.cluster.ClusterChangedEvent;
     import org.elasticsearch.cluster.ClusterName;
    @@ -253,12 +253,12 @@ public void testParamsStatusAndNodeTaskAreDelegated() throws Exception {
     
         public void testTaskCancellation() {
             AtomicLong capturedTaskId = new AtomicLong();
    -        AtomicReference<ActionListener<CancelTasksResponse>> capturedListener = new AtomicReference<>();
    +        AtomicReference<ActionListener<ListTasksResponse>> capturedListener = new AtomicReference<>();
             Client client = mock(Client.class);
             when(client.settings()).thenReturn(Settings.EMPTY);
             PersistentTasksService persistentTasksService = new PersistentTasksService(null, null, client) {
                 @Override
    -            void sendCancelRequest(final long taskId, final String reason, final ActionListener<CancelTasksResponse> listener) {
    +            void sendCancelRequest(final long taskId, final String reason, final ActionListener<ListTasksResponse> listener) {
                     capturedTaskId.set(taskId);
                     capturedListener.set(listener);
                 }
    @@ -327,8 +327,7 @@ public void sendCompletionRequest(
             // That should trigger cancellation request
             assertThat(capturedTaskId.get(), equalTo(localId));
             // Notify successful cancellation
    -        capturedListener.get()
    -            .onResponse(new CancelTasksResponse(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()));
    +        capturedListener.get().onResponse(new ListTasksResponse(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()));
     
             // finish or fail task
             if (randomBoolean()) {
    @@ -349,7 +348,7 @@ public void testTaskLocalAbort() {
             when(client.settings()).thenReturn(Settings.EMPTY);
             PersistentTasksService persistentTasksService = new PersistentTasksService(null, null, client) {
                 @Override
    -            void sendCancelRequest(final long taskId, final String reason, final ActionListener<CancelTasksResponse> listener) {
    +            void sendCancelRequest(final long taskId, final String reason, final ActionListener<ListTasksResponse> listener) {
                     fail("Shouldn't be called during local abort");
                 }
     
    diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java
    index c131537c1815a..d0ed4f87dbc58 100644
    --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java
    +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java
    @@ -76,10 +76,7 @@
      */
     public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin, PersistentTaskPlugin {
     
    -    public static final ActionType<TestTasksResponse> TEST_ACTION = new ActionType<>(
    -        "cluster:admin/persistent/task_test",
    -        TestTasksResponse::new
    -    );
    +    public static final ActionType<TestTasksResponse> TEST_ACTION = new ActionType<>("cluster:admin/persistent/task_test");
     
         @Override
         public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
    diff --git a/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java b/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java
    index a9f025210933d..e13c0c135c5e9 100644
    --- a/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java
    +++ b/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java
    @@ -9,9 +9,8 @@
     package org.elasticsearch.plugins.spi;
     
     import org.elasticsearch.common.io.Streams;
    -import org.elasticsearch.search.aggregations.Aggregation;
    -import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue;
     import org.elasticsearch.search.suggest.Suggest;
    +import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
     import org.elasticsearch.search.suggest.term.TermSuggestion;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.xcontent.NamedXContentRegistry;
    @@ -47,7 +46,7 @@ public void testNamedXContents() {
             assertEquals(2, namedXContents.size());
     
             List<Predicate<NamedXContentRegistry.Entry>> predicates = new ArrayList<>(2);
    -        predicates.add(e -> Aggregation.class.equals(e.categoryClass) && "test_aggregation".equals(e.name.getPreferredName()));
    +        predicates.add(e -> Suggest.Suggestion.class.equals(e.categoryClass) && "phrase_aggregation".equals(e.name.getPreferredName()));
             predicates.add(e -> Suggest.Suggestion.class.equals(e.categoryClass) && "test_suggestion".equals(e.name.getPreferredName()));
             predicates.forEach(predicate -> assertEquals(1, namedXContents.stream().filter(predicate).count()));
         }
    @@ -60,9 +59,9 @@ public TestNamedXContentProvider() {}
             public List getNamedXContentParsers() {
                 return Arrays.asList(
                     new NamedXContentRegistry.Entry(
    -                    Aggregation.class,
    -                    new ParseField("test_aggregation"),
    -                    (parser, context) -> ParsedSimpleValue.fromXContent(parser, (String) context)
    +                    Suggest.Suggestion.class,
    +                    new ParseField("phrase_aggregation"),
    +                    (parser, context) -> PhraseSuggestion.fromXContent(parser, (String) context)
                     ),
                     new NamedXContentRegistry.Entry(
                         Suggest.Suggestion.class,
    diff --git a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyTests.java b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyTests.java
    index 485e2a3a3fdd7..cce2a8db25c8e 100644
    --- a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyTests.java
    +++ b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyTests.java
    @@ -29,7 +29,6 @@
     import java.util.ArrayList;
     import java.util.List;
     import java.util.Map;
    -import java.util.concurrent.atomic.AtomicBoolean;
     
     public class ChunkedRestResponseBodyTests extends ESTestCase {
     
    @@ -51,59 +50,39 @@ public void testEncodesChunkedXContentCorrectly() throws IOException {
             }
             final var bytesDirect = BytesReference.bytes(builderDirect);
     
    -        final var isClosed = new AtomicBoolean();
    -        try (
    -            var chunkedResponse = ChunkedRestResponseBody.fromXContent(
    -                chunkedToXContent,
    -                ToXContent.EMPTY_PARAMS,
    -                new FakeRestChannel(
    -                    new FakeRestRequest.Builder(xContentRegistry()).withContent(BytesArray.EMPTY, randomXContent.type()).build(),
    -                    randomBoolean(),
    -                    1
    -                ),
    -                () -> assertTrue(isClosed.compareAndSet(false, true))
    +        var chunkedResponse = ChunkedRestResponseBody.fromXContent(
    +            chunkedToXContent,
    +            ToXContent.EMPTY_PARAMS,
    +            new FakeRestChannel(
    +                new FakeRestRequest.Builder(xContentRegistry()).withContent(BytesArray.EMPTY, randomXContent.type()).build(),
    +                randomBoolean(),
    +                1
                 )
    -        ) {
    -
    -            final List<BytesReference> refsGenerated = new ArrayList<>();
    -            while (chunkedResponse.isDone() == false) {
    -                refsGenerated.add(chunkedResponse.encodeChunk(randomIntBetween(2, 10), BytesRefRecycler.NON_RECYCLING_INSTANCE));
    -            }
    +        );
     
    -            assertEquals(bytesDirect, CompositeBytesReference.of(refsGenerated.toArray(new BytesReference[0])));
    -            assertFalse(isClosed.get());
    +        final List<BytesReference> refsGenerated = new ArrayList<>();
    +        while (chunkedResponse.isDone() == false) {
    +            refsGenerated.add(chunkedResponse.encodeChunk(randomIntBetween(2, 10), BytesRefRecycler.NON_RECYCLING_INSTANCE));
             }
    -        assertTrue(isClosed.get());
    +
    +        assertEquals(bytesDirect, CompositeBytesReference.of(refsGenerated.toArray(new BytesReference[0])));
         }
     
         public void testFromTextChunks() throws IOException {
             final var chunks = randomList(1000, () -> randomUnicodeOfLengthBetween(1, 100));
    -        final var isClosed = new AtomicBoolean();
    -        try (
    -            var body = ChunkedRestResponseBody.fromTextChunks(
    -                "text/plain",
    -                Iterators.map(chunks.iterator(), s -> w -> w.write(s)),
    -                () -> assertTrue(isClosed.compareAndSet(false, true))
    -            )
    -        ) {
    -            final List<BytesReference> refsGenerated = new ArrayList<>();
    -            while (body.isDone() == false) {
    -                refsGenerated.add(body.encodeChunk(randomIntBetween(2, 10), BytesRefRecycler.NON_RECYCLING_INSTANCE));
    -            }
    -            final BytesReference chunkedBytes = CompositeBytesReference.of(refsGenerated.toArray(new BytesReference[0]));
    +        var body = ChunkedRestResponseBody.fromTextChunks("text/plain", Iterators.map(chunks.iterator(), s -> w -> w.write(s)));
    +        final List<BytesReference> refsGenerated = new ArrayList<>();
    +        while (body.isDone() == false) {
    +            refsGenerated.add(body.encodeChunk(randomIntBetween(2, 10), BytesRefRecycler.NON_RECYCLING_INSTANCE));
    +        }
    +        final BytesReference chunkedBytes = CompositeBytesReference.of(refsGenerated.toArray(new BytesReference[0]));
     
    -            try (
    -                var outputStream = new ByteArrayOutputStream();
    -                var writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)
    -            ) {
    -                for (final var chunk : chunks) {
    -                    writer.write(chunk);
    -                }
    -                writer.flush();
    -                assertEquals(new BytesArray(outputStream.toByteArray()), chunkedBytes);
    +        try (var outputStream = new ByteArrayOutputStream(); var writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)) {
    +            for (final var chunk : chunks) {
    +                writer.write(chunk);
                 }
    -            assertFalse(isClosed.get());
    +            writer.flush();
    +            assertEquals(new BytesArray(outputStream.toByteArray()), chunkedBytes);
             }
    -        assertTrue(isClosed.get());
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
    index 00c65437579ec..37300f1c19b1c 100644
    --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
    +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
    @@ -286,22 +286,25 @@ public void testRegisterSecondMethodWithDifferentNamedWildcard() {
             assertThat(exception.getMessage(), equalTo("Trying to use conflicting wildcard names for same path: wildcard1 and wildcard2"));
         }
     
    -    public void testRestHandlerWrapper() throws Exception {
    +    public void testRestInterceptor() throws Exception {
             AtomicBoolean handlerCalled = new AtomicBoolean(false);
             AtomicBoolean wrapperCalled = new AtomicBoolean(false);
    +        final boolean callHandler = randomBoolean();
             final RestHandler handler = (RestRequest request, RestChannel channel, NodeClient client) -> handlerCalled.set(true);
             final HttpServerTransport httpServerTransport = new TestHttpServerTransport();
    -        final RestController restController = new RestController(h -> {
    -            assertSame(handler, h);
    -            return (RestRequest request, RestChannel channel, NodeClient client) -> wrapperCalled.set(true);
    -        }, client, circuitBreakerService, usageService, tracer);
    +        final RestInterceptor interceptor = (request, channel, targetHandler, listener) -> {
    +            assertSame(handler, targetHandler);
    +            wrapperCalled.set(true);
    +            listener.onResponse(callHandler);
    +        };
    +        final RestController restController = new RestController(interceptor, client, circuitBreakerService, usageService, tracer);
             restController.registerHandler(new Route(GET, "/wrapped"), handler);
             RestRequest request = testRestRequest("/wrapped", "{}", XContentType.JSON);
             AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST);
             restController.dispatchRequest(request, channel, client.threadPool().getThreadContext());
             httpServerTransport.start();
    -        assertTrue(wrapperCalled.get());
    -        assertFalse(handlerCalled.get());
    +        assertThat(wrapperCalled.get(), is(true));
    +        assertThat(handlerCalled.get(), is(callHandler));
         }
     
         public void testDispatchRequestAddsAndFreesBytesOnSuccess() {
    diff --git a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java
    index 41710d6c1b76c..41a54ac580a55 100644
    --- a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java
    @@ -49,6 +49,7 @@
     
     import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE;
     import static org.elasticsearch.ElasticsearchExceptionTests.assertDeepEquals;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
     import static org.hamcrest.Matchers.contains;
     import static org.hamcrest.Matchers.containsString;
     import static org.hamcrest.Matchers.equalTo;
    @@ -96,7 +97,8 @@ public void testWithHeaders() throws Exception {
         public void testEmptyChunkedBody() {
             RestResponse response = RestResponse.chunked(
                 RestStatus.OK,
    -            ChunkedRestResponseBody.fromTextChunks(RestResponse.TEXT_CONTENT_TYPE, Collections.emptyIterator(), null)
    +            ChunkedRestResponseBody.fromTextChunks(RestResponse.TEXT_CONTENT_TYPE, Collections.emptyIterator()),
    +            null
             );
             assertFalse(response.isChunked());
             assertNotNull(response.content());
    @@ -420,7 +422,7 @@ public void testErrorToAndFromXContent() throws IOException {
     
             ElasticsearchException parsedError;
             try (XContentParser parser = createParser(xContentType.xContent(), response.content())) {
    -            parsedError = RestResponse.errorFromXContent(parser);
    +            parsedError = errorFromXContent(parser);
                 assertNull(parser.nextToken());
             }
     
    @@ -436,13 +438,49 @@ public void testNoErrorFromXContent() throws IOException {
                     builder.endObject();
     
                     try (XContentParser parser = createParser(builder.contentType().xContent(), BytesReference.bytes(builder))) {
    -                    RestResponse.errorFromXContent(parser);
    +                    errorFromXContent(parser);
                     }
                 }
             });
             assertEquals("Failed to parse elasticsearch status exception: no exception was found", e.getMessage());
         }
     
    +    private static ElasticsearchStatusException errorFromXContent(XContentParser parser) throws IOException {
    +        XContentParser.Token token = parser.nextToken();
    +        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
    +
    +        ElasticsearchException exception = null;
    +        RestStatus status = null;
    +
    +        String currentFieldName = null;
    +        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    +            if (token == XContentParser.Token.FIELD_NAME) {
    +                currentFieldName = parser.currentName();
    +            }
    +            if (RestResponse.STATUS.equals(currentFieldName)) {
    +                if (token != XContentParser.Token.FIELD_NAME) {
    +                    ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
    +                    status = RestStatus.fromCode(parser.intValue());
    +                }
    +            } else {
    +                exception = ElasticsearchException.failureFromXContent(parser);
    +            }
    +        }
    +
    +        if (exception == null) {
    +            throw new IllegalStateException("Failed to parse elasticsearch status exception: no exception was found");
    +        }
    +
    +        ElasticsearchStatusException result = new ElasticsearchStatusException(exception.getMessage(), status, exception.getCause());
    +        for (String header : exception.getHeaderKeys()) {
    +            result.addHeader(header, exception.getHeader(header));
    +        }
    +        for (String metadata : exception.getMetadataKeys()) {
    +            result.addMetadata(metadata, exception.getMetadata(metadata));
    +        }
    +        return result;
    +    }
    +
         public void testResponseContentTypeUponException() throws Exception {
             String mediaType = XContentType.VND_JSON.toParsedMediaType()
                 .responseContentTypeHeader(
    diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
    index 7000f1a153ac6..761d2b454b134 100644
    --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
    @@ -16,7 +16,6 @@
     import org.elasticsearch.action.support.TransportAction;
     import org.elasticsearch.client.internal.node.NodeClient;
     import org.elasticsearch.common.bytes.BytesArray;
    -import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.common.util.concurrent.ThreadContext;
     import org.elasticsearch.core.RestApiVersion;
    @@ -79,14 +78,7 @@ protected void doExecute(Task task, ActionRequest request, ActionListener();
             actions.put(ValidateQueryAction.INSTANCE, transportAction);
     
    -        client.initialize(
    -            actions,
    -            taskManager,
    -            () -> "local",
    -            mock(Transport.Connection.class),
    -            null,
    -            new NamedWriteableRegistry(List.of())
    -        );
    +        client.initialize(actions, taskManager, () -> "local", mock(Transport.Connection.class), null);
             controller.registerHandler(action);
         }
     
    diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestCatRecoveryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestCatRecoveryActionTests.java
    index 3db5faa505c9f..aebd18740b005 100644
    --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestCatRecoveryActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestCatRecoveryActionTests.java
    @@ -58,7 +58,7 @@ public void testRestRecoveryAction() {
                 when(timer.time()).thenReturn(time);
                 when(timer.stopTime()).thenReturn(startTime + time);
                 when(state.getTimer()).thenReturn(timer);
    -            when(state.getRecoverySource()).thenReturn(TestShardRouting.randomRecoverySource());
    +            when(state.getRecoverySource()).thenReturn(TestShardRouting.buildRecoverySource());
                 when(state.getStage()).thenReturn(randomFrom(RecoveryState.Stage.values()));
                 final DiscoveryNode sourceNode = randomBoolean() ? mock(DiscoveryNode.class) : null;
                 if (sourceNode != null) {
    diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java
    index 6fadb71652163..7ad935744680f 100644
    --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java
    @@ -10,6 +10,7 @@
     
     import org.elasticsearch.action.search.MultiSearchResponse;
     import org.elasticsearch.common.bytes.BytesArray;
    +import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.core.RestApiVersion;
     import org.elasticsearch.rest.RestRequest;
    @@ -18,13 +19,14 @@
     import org.elasticsearch.usage.UsageService;
     import org.elasticsearch.xcontent.XContentType;
     import org.junit.Before;
    -import org.mockito.Mockito;
     
     import java.nio.charset.StandardCharsets;
     import java.util.Collections;
     import java.util.List;
     import java.util.Map;
     
    +import static org.mockito.Mockito.mock;
    +
     public final class RestMultiSearchActionTests extends RestActionTestCase {
         final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7));
     
    @@ -32,10 +34,10 @@ public final class RestMultiSearchActionTests extends RestActionTestCase {
     
         @Before
         public void setUpAction() {
    -        action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder());
    +        action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class));
             controller().registerHandler(action);
    -        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(MultiSearchResponse.class));
    -        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(MultiSearchResponse.class));
    +        verifyingClient.setExecuteVerifier((actionType, request) -> mock(MultiSearchResponse.class));
    +        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(MultiSearchResponse.class));
         }
     
         public void testTypeInPath() {
    diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java
    index 6d0480048982d..6c1a234b32cd9 100644
    --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java
    +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java
    @@ -10,6 +10,7 @@
     import org.elasticsearch.action.search.SearchRequest;
     import org.elasticsearch.action.search.SearchResponse;
     import org.elasticsearch.action.search.SearchType;
    +import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.core.RestApiVersion;
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.search.builder.SearchSourceBuilder;
    @@ -21,13 +22,14 @@
     import org.elasticsearch.test.rest.RestActionTestCase;
     import org.elasticsearch.usage.UsageService;
     import org.junit.Before;
    -import org.mockito.Mockito;
     
     import java.util.Collections;
     import java.util.HashMap;
     import java.util.List;
     import java.util.Map;
     
    +import static org.mockito.Mockito.mock;
    +
     public final class RestSearchActionTests extends RestActionTestCase {
         final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7));
     
    @@ -35,10 +37,10 @@ public final class RestSearchActionTests extends RestActionTestCase {
     
         @Before
         public void setUpAction() {
    -        action = new RestSearchAction(new UsageService().getSearchUsageHolder());
    +        action = new RestSearchAction(new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class));
             controller().registerHandler(action);
    -        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(SearchResponse.class));
    -        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(SearchResponse.class));
    +        verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchResponse.class));
    +        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchResponse.class));
         }
     
         public void testTypeInPath() {
    diff --git a/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java b/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java
    index 8b2759ff5a7a0..51f741e4f03fc 100644
    --- a/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java
    @@ -9,9 +9,12 @@
     package org.elasticsearch.search;
     
     import org.elasticsearch.action.search.ClearScrollResponse;
    +import org.elasticsearch.action.search.ClosePointInTimeResponse;
     import org.elasticsearch.common.bytes.BytesReference;
     import org.elasticsearch.common.xcontent.XContentHelper;
     import org.elasticsearch.test.ESTestCase;
    +import org.elasticsearch.xcontent.ConstructingObjectParser;
    +import org.elasticsearch.xcontent.ObjectParser;
     import org.elasticsearch.xcontent.ToXContent;
     import org.elasticsearch.xcontent.XContentBuilder;
     import org.elasticsearch.xcontent.XContentParser;
    @@ -21,9 +24,30 @@
     import java.io.IOException;
     
     import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
    +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
     
     public class ClearScrollResponseTests extends ESTestCase {
     
    +    private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
    +        "clear_scroll",
    +        true,
    +        a -> new ClosePointInTimeResponse((boolean) a[0], (int) a[1])
    +    );
    +    static {
    +        PARSER.declareField(
    +            constructorArg(),
    +            (parser, context) -> parser.booleanValue(),
    +            ClearScrollResponse.SUCCEEDED,
    +            ObjectParser.ValueType.BOOLEAN
    +        );
    +        PARSER.declareField(
    +            constructorArg(),
    +            (parser, context) -> parser.intValue(),
    +            ClearScrollResponse.NUMFREED,
    +            ObjectParser.ValueType.INT
    +        );
    +    }
    +
         public void testToXContent() throws IOException {
             ClearScrollResponse clearScrollResponse = new ClearScrollResponse(true, 10);
             try (XContentBuilder builder = JsonXContent.contentBuilder()) {
    @@ -39,7 +63,7 @@ public void testToAndFromXContent() throws IOException {
             BytesReference originalBytes = toShuffledXContent(originalResponse, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
             ClearScrollResponse parsedResponse;
             try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
    -            parsedResponse = ClearScrollResponse.fromXContent(parser);
    +            parsedResponse = PARSER.parse(parser, null);
             }
             assertEquals(originalResponse.isSucceeded(), parsedResponse.isSucceeded());
             assertEquals(originalResponse.getNumFreed(), parsedResponse.getNumFreed());
    diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
    index 19f6400badcf5..79f16ab390dd2 100644
    --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
    @@ -92,8 +92,6 @@ public void testAddingCancellationActions() throws IOException {
     
             Runnable r = () -> {};
             searcher.addQueryCancellation(r);
    -        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> searcher.addQueryCancellation(r));
    -        assertEquals("Cancellation runnable already added", iae.getMessage());
         }
     
         public void testCancellableCollector() throws IOException {
    diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
    index e9bf6f83f5bbc..40bdc3da37242 100644
    --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
    @@ -15,6 +15,7 @@
     import org.elasticsearch.common.bytes.BytesArray;
     import org.elasticsearch.common.bytes.BytesReference;
     import org.elasticsearch.common.document.DocumentField;
    +import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.Writeable;
     import org.elasticsearch.common.util.Maps;
     import org.elasticsearch.index.Index;
    @@ -143,7 +144,7 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp
     
         @Override
         protected Writeable.Reader instanceReader() {
    -        return SearchHit::readFrom;
    +        return in -> SearchHit.readFrom(in, randomBoolean());
         }
     
         @Override
    @@ -159,16 +160,20 @@ protected SearchHit mutateInstance(SearchHit instance) {
         public void testFromXContent() throws IOException {
             XContentType xContentType = randomFrom(XContentType.values()).canonical();
             SearchHit searchHit = createTestItem(xContentType, true, false);
    -        boolean humanReadable = randomBoolean();
    -        BytesReference originalBytes = toShuffledXContent(searchHit, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
    -        SearchHit parsed;
    -        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
    -            parser.nextToken(); // jump to first START_OBJECT
    -            parsed = SearchHit.fromXContent(parser);
    -            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
    -            assertNull(parser.nextToken());
    +        try {
    +            boolean humanReadable = randomBoolean();
    +            BytesReference originalBytes = toShuffledXContent(searchHit, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
    +            SearchHit parsed;
    +            try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
    +                parser.nextToken(); // jump to first START_OBJECT
    +                parsed = SearchHit.fromXContent(parser);
    +                assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
    +                assertNull(parser.nextToken());
    +            }
    +            assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
    +        } finally {
    +            searchHit.decRef();
             }
    -        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
         }
     
         /**
    @@ -184,22 +189,26 @@ public void testFromXContent() throws IOException {
         public void testFromXContentLenientParsing() throws IOException {
             XContentType xContentType = randomFrom(XContentType.values());
             SearchHit searchHit = createTestItem(xContentType, true, true);
    -        BytesReference originalBytes = toXContent(searchHit, xContentType, true);
    -        Predicate pathsToExclude = path -> path.endsWith("highlight")
    -            || path.contains("fields")
    -            || path.contains("_source")
    -            || path.contains("inner_hits")
    -            || path.isEmpty();
    -        BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random());
    -
    -        SearchHit parsed;
    -        try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
    -            parser.nextToken(); // jump to first START_OBJECT
    -            parsed = SearchHit.fromXContent(parser);
    -            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
    -            assertNull(parser.nextToken());
    +        try {
    +            BytesReference originalBytes = toXContent(searchHit, xContentType, true);
    +            Predicate pathsToExclude = path -> path.endsWith("highlight")
    +                || path.contains("fields")
    +                || path.contains("_source")
    +                || path.contains("inner_hits")
    +                || path.isEmpty();
    +            BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random());
    +
    +            SearchHit parsed;
    +            try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
    +                parser.nextToken(); // jump to first START_OBJECT
    +                parsed = SearchHit.fromXContent(parser);
    +                assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
    +                assertNull(parser.nextToken());
    +            }
    +            assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType);
    +        } finally {
    +            searchHit.decRef();
             }
    -        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType);
         }
     
         /**
    @@ -221,15 +230,19 @@ public void testFromXContentWithoutTypeAndId() throws IOException {
     
         public void testToXContent() throws IOException {
             SearchHit searchHit = new SearchHit(1, "id1");
    -        searchHit.score(1.5f);
    -        XContentBuilder builder = JsonXContent.contentBuilder();
    -        searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
    -        assertEquals("""
    -            {"_id":"id1","_score":1.5}""", Strings.toString(builder));
    +        try {
    +            searchHit.score(1.5f);
    +            XContentBuilder builder = JsonXContent.contentBuilder();
    +            searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
    +            assertEquals("""
    +                {"_id":"id1","_score":1.5}""", Strings.toString(builder));
    +        } finally {
    +            searchHit.decRef();
    +        }
         }
     
         public void testRankToXContent() throws IOException {
    -        SearchHit searchHit = new SearchHit(1, "id1");
    +        SearchHit searchHit = SearchHit.unpooled(1, "id1");
             searchHit.setRank(1);
             XContentBuilder builder = JsonXContent.contentBuilder();
             searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
    @@ -264,30 +277,42 @@ public void testSerializeShardTarget() throws Exception {
             hit2.shard(target);
     
             SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f);
    -
    -        TransportVersion version = TransportVersionUtils.randomVersion(random());
    -        SearchHits results = copyWriteable(hits, getNamedWriteableRegistry(), SearchHits::new, version);
    -        SearchShardTarget deserializedTarget = results.getAt(0).getShard();
    -        assertThat(deserializedTarget, equalTo(target));
    -        assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue());
    -        assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue());
    -        assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue());
    -        assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue());
    -        for (SearchHit hit : results) {
    -            assertEquals(clusterAlias, hit.getClusterAlias());
    -            if (hit.getInnerHits() != null) {
    -                for (SearchHits innerhits : hit.getInnerHits().values()) {
    -                    for (SearchHit innerHit : innerhits) {
    -                        assertEquals(clusterAlias, innerHit.getClusterAlias());
    +        try {
    +            TransportVersion version = TransportVersionUtils.randomVersion(random());
    +            SearchHits results = copyWriteable(
    +                hits,
    +                getNamedWriteableRegistry(),
    +                (StreamInput in) -> SearchHits.readFrom(in, randomBoolean()),
    +                version
    +            );
    +            try {
    +                SearchShardTarget deserializedTarget = results.getAt(0).getShard();
    +                assertThat(deserializedTarget, equalTo(target));
    +                assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue());
    +                assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue());
    +                assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue());
    +                assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue());
    +                for (SearchHit hit : results) {
    +                    assertEquals(clusterAlias, hit.getClusterAlias());
    +                    if (hit.getInnerHits() != null) {
    +                        for (SearchHits innerhits : hit.getInnerHits().values()) {
    +                            for (SearchHit innerHit : innerhits) {
    +                                assertEquals(clusterAlias, innerHit.getClusterAlias());
    +                            }
    +                        }
                         }
                     }
    +                assertThat(results.getAt(1).getShard(), equalTo(target));
    +            } finally {
    +                results.decRef();
                 }
    +        } finally {
    +            hits.decRef();
             }
    -        assertThat(results.getAt(1).getShard(), equalTo(target));
         }
     
         public void testNullSource() {
    -        SearchHit searchHit = new SearchHit(0, "_id");
    +        SearchHit searchHit = SearchHit.unpooled(0, "_id");
     
             assertThat(searchHit.getSourceAsMap(), nullValue());
             assertThat(searchHit.getSourceRef(), nullValue());
    @@ -299,7 +324,7 @@ public void testNullSource() {
         }
     
         public void testHasSource() {
    -        SearchHit searchHit = new SearchHit(randomInt());
    +        SearchHit searchHit = SearchHit.unpooled(randomInt());
             assertFalse(searchHit.hasSource());
             searchHit.sourceRef(new BytesArray("{}"));
             assertTrue(searchHit.hasSource());
    @@ -376,7 +401,7 @@ public void testToXContentEmptyFields() throws IOException {
             Map fields = new HashMap<>();
             fields.put("foo", new DocumentField("foo", Collections.emptyList()));
             fields.put("bar", new DocumentField("bar", Collections.emptyList()));
    -        SearchHit hit = new SearchHit(0, "_id");
    +        SearchHit hit = SearchHit.unpooled(0, "_id");
             hit.addDocumentFields(fields, Map.of());
             {
                 BytesReference originalBytes = toShuffledXContent(hit, XContentType.JSON, ToXContent.EMPTY_PARAMS, randomBoolean());
    @@ -389,13 +414,17 @@ public void testToXContentEmptyFields() throws IOException {
                     assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
                     assertNull(parser.nextToken());
                 }
    -            assertThat(parsed.getFields().size(), equalTo(0));
    +            try {
    +                assertThat(parsed.getFields().size(), equalTo(0));
    +            } finally {
    +                parsed.decRef();
    +            }
             }
     
             fields = new HashMap<>();
             fields.put("foo", new DocumentField("foo", Collections.emptyList()));
             fields.put("bar", new DocumentField("bar", Collections.singletonList("value")));
    -        hit = new SearchHit(0, "_id");
    +        hit = SearchHit.unpooled(0, "_id");
             hit.addDocumentFields(fields, Collections.emptyMap());
             {
                 BytesReference originalBytes = toShuffledXContent(hit, XContentType.JSON, ToXContent.EMPTY_PARAMS, randomBoolean());
    @@ -412,7 +441,7 @@ public void testToXContentEmptyFields() throws IOException {
     
             Map metadata = new HashMap<>();
             metadata.put("_routing", new DocumentField("_routing", Collections.emptyList()));
    -        hit = new SearchHit(0, "_id");
    +        hit = SearchHit.unpooled(0, "_id");
             hit.addDocumentFields(fields, Collections.emptyMap());
             {
                 BytesReference originalBytes = toShuffledXContent(hit, XContentType.JSON, ToXContent.EMPTY_PARAMS, randomBoolean());
    @@ -427,7 +456,13 @@ public void testToXContentEmptyFields() throws IOException {
                 assertThat(parsed.getFields().get("bar").getValues(), equalTo(Collections.singletonList("value")));
                 assertNull(parsed.getFields().get("_routing"));
             }
    +    }
     
    +    @Override
    +    protected void dispose(SearchHit searchHit) {
    +        if (searchHit != null) {
    +            searchHit.decRef();
    +        }
         }
     
         static Explanation createExplanation(int depth) {
    diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
    index 1e720064dab56..4ca3c5b8dd46e 100644
    --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
    @@ -118,7 +118,7 @@ protected SearchHits mutateInstance(SearchHits instance) {
                     } else {
                         totalHits = null;
                     }
    -                return new SearchHits(instance.getHits(), totalHits, instance.getMaxScore());
    +                return new SearchHits(instance.asUnpooled().getHits(), totalHits, instance.getMaxScore());
                 case 2:
                     final float maxScore;
                     if (Float.isNaN(instance.getMaxScore())) {
    @@ -126,7 +126,7 @@ protected SearchHits mutateInstance(SearchHits instance) {
                     } else {
                         maxScore = Float.NaN;
                     }
    -                return new SearchHits(instance.getHits(), instance.getTotalHits(), maxScore);
    +                return new SearchHits(instance.asUnpooled().getHits(), instance.getTotalHits(), maxScore);
                 case 3:
                     SortField[] sortFields;
                     if (instance.getSortFields() == null) {
    @@ -135,7 +135,7 @@ protected SearchHits mutateInstance(SearchHits instance) {
                         sortFields = randomBoolean() ? createSortFields(instance.getSortFields().length + 1) : null;
                     }
                     return new SearchHits(
    -                    instance.getHits(),
    +                    instance.asUnpooled().getHits(),
                         instance.getTotalHits(),
                         instance.getMaxScore(),
                         sortFields,
    @@ -150,7 +150,7 @@ protected SearchHits mutateInstance(SearchHits instance) {
                         collapseField = randomBoolean() ? instance.getCollapseField() + randomAlphaOfLengthBetween(2, 5) : null;
                     }
                     return new SearchHits(
    -                    instance.getHits(),
    +                    instance.asUnpooled().getHits(),
                         instance.getTotalHits(),
                         instance.getMaxScore(),
                         instance.getSortFields(),
    @@ -165,7 +165,7 @@ protected SearchHits mutateInstance(SearchHits instance) {
                         collapseValues = randomBoolean() ? createCollapseValues(instance.getCollapseValues().length + 1) : null;
                     }
                     return new SearchHits(
    -                    instance.getHits(),
    +                    instance.asUnpooled().getHits(),
                         instance.getTotalHits(),
                         instance.getMaxScore(),
                         instance.getSortFields(),
    @@ -177,6 +177,11 @@ protected SearchHits mutateInstance(SearchHits instance) {
             }
         }
     
    +    @Override
    +    protected void dispose(SearchHits searchHits) {
    +        searchHits.decRef();
    +    }
    +
         @Override
         protected Predicate getRandomFieldsExcludeFilter() {
             return path -> (path.isEmpty()
    @@ -193,7 +198,7 @@ protected String[] getShuffleFieldsExceptions() {
     
         @Override
         protected Writeable.Reader instanceReader() {
    -        return SearchHits::new;
    +        return in -> SearchHits.readFrom(in, randomBoolean());
         }
     
         @Override
    @@ -223,15 +228,19 @@ protected SearchHits doParseInstance(XContentParser parser) throws IOException {
             SearchHits searchHits = SearchHits.fromXContent(parser);
             assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
             assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
    -        return searchHits;
    +        try {
    +            return searchHits.asUnpooled();
    +        } finally {
    +            searchHits.decRef();
    +        }
         }
     
         public void testToXContent() throws IOException {
    -        SearchHit[] hits = new SearchHit[] { new SearchHit(1, "id1"), new SearchHit(2, "id2") };
    +        SearchHit[] hits = new SearchHit[] { SearchHit.unpooled(1, "id1"), SearchHit.unpooled(2, "id2") };
     
             long totalHits = 1000;
             float maxScore = 1.5f;
    -        SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore);
    +        SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore);
             XContentBuilder builder = JsonXContent.contentBuilder();
             builder.startObject();
             ChunkedToXContent.wrapAsToXContent(searchHits).toXContent(builder, ToXContent.EMPTY_PARAMS);
    @@ -251,7 +260,10 @@ public void testToXContent() throws IOException {
     
         public void testFromXContentWithShards() throws IOException {
             for (boolean withExplanation : new boolean[] { true, false }) {
    -            final SearchHit[] hits = new SearchHit[] { new SearchHit(1, "id1"), new SearchHit(2, "id2"), new SearchHit(10, "id10") };
    +            final SearchHit[] hits = new SearchHit[] {
    +                SearchHit.unpooled(1, "id1"),
    +                SearchHit.unpooled(2, "id2"),
    +                SearchHit.unpooled(10, "id10") };
     
                 for (SearchHit hit : hits) {
                     String index = randomAlphaOfLengthBetween(5, 10);
    @@ -269,7 +281,7 @@ public void testFromXContentWithShards() throws IOException {
     
                 long totalHits = 1000;
                 float maxScore = 1.5f;
    -            SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore);
    +            SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore);
                 XContentType xContentType = randomFrom(XContentType.values()).canonical();
                 BytesReference bytes = toShuffledXContent(
                     ChunkedToXContent.wrapAsToXContent(searchHits),
    @@ -304,7 +316,6 @@ public void testFromXContentWithShards() throws IOException {
                         }
                     }
                 }
    -
             }
         }
     }
    diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
    index 57974cff0d03c..6a8ac3d1aa876 100644
    --- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
    @@ -425,6 +425,7 @@ public CheckedBiConsumer getReque
             "combined_fields",
             "dis_max",
             "exists",
    +        "exact_knn",
             "function_score",
             "fuzzy",
             "geo_bounding_box",
    diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java
    index 87299e0f5645d..bd4475ac07705 100644
    --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java
    @@ -8,11 +8,6 @@
     
     package org.elasticsearch.search.aggregations;
     
    -import org.elasticsearch.common.ParsingException;
    -import org.elasticsearch.common.bytes.BytesReference;
    -import org.elasticsearch.common.xcontent.XContentHelper;
    -import org.elasticsearch.rest.action.search.RestSearchAction;
    -import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
     import org.elasticsearch.search.aggregations.bucket.composite.InternalCompositeTests;
     import org.elasticsearch.search.aggregations.bucket.filter.InternalFilterTests;
     import org.elasticsearch.search.aggregations.bucket.filter.InternalFiltersTests;
    @@ -63,26 +58,11 @@
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.InternalAggregationTestCase;
     import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
    -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
    -import org.elasticsearch.xcontent.NamedXContentRegistry;
    -import org.elasticsearch.xcontent.ToXContent;
    -import org.elasticsearch.xcontent.XContent;
    -import org.elasticsearch.xcontent.XContentBuilder;
    -import org.elasticsearch.xcontent.XContentFactory;
    -import org.elasticsearch.xcontent.XContentParser;
    -import org.elasticsearch.xcontent.XContentType;
     import org.junit.After;
     import org.junit.Before;
     
    -import java.io.IOException;
     import java.util.ArrayList;
     import java.util.List;
    -import java.util.Set;
    -import java.util.function.Predicate;
    -import java.util.stream.Collectors;
    -
    -import static java.util.Collections.singletonMap;
    -import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
     
     /**
      * This class tests that aggregations parsing works properly. It checks that we can parse
    @@ -141,11 +121,6 @@ public class AggregationsTests extends ESTestCase {
             new InternalMedianAbsoluteDeviationTests()
         );
     
    -    @Override
    -    protected NamedXContentRegistry xContentRegistry() {
    -        return new NamedXContentRegistry(InternalAggregationTestCase.getDefaultNamedXContents());
    -    }
    -
         @Before
         public void init() throws Exception {
             for (InternalAggregationTestCase aggsTest : aggsTests) {
    @@ -165,99 +140,6 @@ public void cleanUp() throws Exception {
             }
         }
     
    -    public void testAllAggsAreBeingTested() {
    -        assertEquals(InternalAggregationTestCase.getDefaultNamedXContents().size(), aggsTests.size());
    -        Set aggs = aggsTests.stream().map((testCase) -> testCase.createTestInstance().getType()).collect(Collectors.toSet());
    -        for (NamedXContentRegistry.Entry entry : InternalAggregationTestCase.getDefaultNamedXContents()) {
    -            assertTrue(aggs.contains(entry.name.getPreferredName()));
    -        }
    -    }
    -
    -    public void testFromXContent() throws IOException {
    -        parseAndAssert(false);
    -    }
    -
    -    public void testFromXContentWithRandomFields() throws IOException {
    -        parseAndAssert(true);
    -    }
    -
    -    /**
    -     * Test that parsing works for a randomly created Aggregations object with a
    -     * randomized aggregation tree. The test randomly chooses an
    -     * {@link XContentType}, randomizes the order of the {@link XContent} fields
    -     * and randomly sets the `humanReadable` flag when rendering the
    -     * {@link XContent}.
    -     *
    -     * @param addRandomFields
    -     *            if set, this will also add random {@link XContent} fields to
    -     *            tests that the parsers are lenient to future additions to rest
    -     *            responses
    -     */
    -    private void parseAndAssert(boolean addRandomFields) throws IOException {
    -        XContentType xContentType = randomFrom(XContentType.values());
    -        final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
    -        Aggregations aggregations = createTestInstance();
    -        BytesReference originalBytes = toShuffledXContent(aggregations, xContentType, params, randomBoolean());
    -        BytesReference mutated;
    -        if (addRandomFields) {
    -            /*
    -             * - don't insert into the root object because it should only contain the named aggregations to test
    -             *
    -             * - don't insert into the "meta" object, because we pass on everything we find there
    -             *
    -             * - we don't want to directly insert anything random into "buckets"  objects, they are used with
    -             * "keyed" aggregations and contain named bucket objects. Any new named object on this level should
    -             * also be a bucket and be parsed as such.
    -             *
    -             * - we cannot insert randomly into VALUE or VALUES objects e.g. in Percentiles, the keys need to be numeric there
    -             *
    -             * - we cannot insert into ExtendedMatrixStats "covariance" or "correlation" fields, their syntax is strict
    -             *
    -             * - we cannot insert random values in top_hits, as all unknown fields
    -             * on a root level of SearchHit are interpreted as meta-fields and will be kept
    -             *
    -             * - exclude "key", it can be an array of objects and we need strict values
    -             */
    -            Predicate excludes = path -> (path.isEmpty()
    -                || path.endsWith("aggregations")
    -                || path.endsWith(Aggregation.CommonFields.META.getPreferredName())
    -                || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName())
    -                || path.endsWith(CommonFields.VALUES.getPreferredName())
    -                || path.endsWith("covariance")
    -                || path.endsWith("correlation")
    -                || path.contains(CommonFields.VALUE.getPreferredName())
    -                || path.endsWith(CommonFields.KEY.getPreferredName())) || path.contains("top_hits");
    -            mutated = insertRandomFields(xContentType, originalBytes, excludes, random());
    -        } else {
    -            mutated = originalBytes;
    -        }
    -        try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
    -            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    -            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
    -            assertEquals(Aggregations.AGGREGATIONS_FIELD, parser.currentName());
    -            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    -            Aggregations parsedAggregations = Aggregations.fromXContent(parser);
    -            BytesReference parsedBytes = XContentHelper.toXContent(parsedAggregations, xContentType, randomBoolean());
    -            ElasticsearchAssertions.assertToXContentEquivalent(originalBytes, parsedBytes, xContentType);
    -        }
    -    }
    -
    -    public void testParsingExceptionOnUnknownAggregation() throws IOException {
    -        XContentBuilder builder = XContentFactory.jsonBuilder();
    -        builder.startObject();
    -        {
    -            builder.startObject("unknownAggregation");
    -            builder.endObject();
    -        }
    -        builder.endObject();
    -        BytesReference originalBytes = BytesReference.bytes(builder);
    -        try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) {
    -            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    -            ParsingException ex = expectThrows(ParsingException.class, () -> Aggregations.fromXContent(parser));
    -            assertEquals("Could not parse aggregation keyed as [unknownAggregation]", ex.getMessage());
    -        }
    -    }
    -
         public final InternalAggregations createTestInstance() {
             return createTestInstance(1, 0, 5);
         }
    diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java
    index 70378268dde30..b5927d71bd782 100644
    --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java
    @@ -247,7 +247,7 @@ public void testNonFinalReduceTopLevelPipelineAggs() {
             );
             List aggs = singletonList(InternalAggregations.from(Collections.singletonList(terms)));
             InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(aggs, maxBucketReduceContext().forPartialReduction());
    -        assertEquals(1, reducedAggs.aggregations.size());
    +        assertEquals(1, reducedAggs.asList().size());
         }
     
         public void testFinalReduceTopLevelPipelineAggs() {
    @@ -268,7 +268,7 @@ public void testFinalReduceTopLevelPipelineAggs() {
     
             InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(terms));
             InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(List.of(aggs), maxBucketReduceContext().forFinalReduction());
    -        assertEquals(2, reducedAggs.aggregations.size());
    +        assertEquals(2, reducedAggs.asList().size());
         }
     
         private AggregationReduceContext.Builder maxBucketReduceContext() {
    @@ -317,7 +317,7 @@ private void writeToAndReadFrom(InternalAggregations aggregations, TransportVers
             try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(serializedAggs.bytes), registry)) {
                 in.setTransportVersion(version);
                 InternalAggregations deserialized = InternalAggregations.readFrom(in);
    -            assertEquals(aggregations.aggregations, deserialized.aggregations);
    +            assertEquals(aggregations.asList(), deserialized.asList());
                 if (iteration < 2) {
                     writeToAndReadFrom(deserialized, version, iteration + 1);
                 }
    diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
    index e95bc3a460133..ac754f4ce9cc7 100644
    --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
    +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
    @@ -15,7 +15,6 @@
     import org.elasticsearch.search.DocValueFormat;
     import org.elasticsearch.search.aggregations.InternalAggregation;
     import org.elasticsearch.search.aggregations.InternalAggregations;
    -import org.elasticsearch.search.aggregations.ParsedAggregation;
     import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
     import org.junit.After;
     
    @@ -105,19 +104,6 @@ public void tearDown() throws Exception {
             types = null;
         }
     
    -    @Override
    -    protected Class implementationClass() {
    -        return ParsedComposite.class;
    -    }
    -
    -    protected 

    P parseAndAssert( - final InternalAggregation aggregation, - final boolean shuffled, - final boolean addRandomFields - ) throws IOException { - return super.parseAndAssert(aggregation, false, false); - } - private CompositeKey createCompositeKey() { Comparable[] keys = new Comparable[sourceNames.size()]; for (int j = 0; j < keys.length; j++) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java index e4b530f076fdd..d6a4ab692df83 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.SamplingContext; @@ -51,11 +50,6 @@ protected void extraAssertReduced(InternalFilter reduced, List i // Nothing extra to assert } - @Override - protected Class implementationClass() { - return ParsedFilter.class; - } - public void testReducePipelinesReturnsSameInstanceWithoutPipelines() { InternalFilter test = createTestInstance(); assertThat(test.reducePipelines(test, emptyReduceContextBuilder().forFinalReduction(), PipelineTree.EMPTY), sameInstance(test)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java index 11d39fe8c8de0..ce7e561fb4891 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java @@ -84,11 +84,6 @@ protected void assertReduced(InternalFilters reduced, List inpu assertEquals(expectedCounts, actualCounts); } - @Override - protected Class implementationClass() { - return ParsedFilters.class; - } - @Override protected InternalFilters mutateInstance(InternalFilters instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java index 5237ebe400e87..cc52032ba5df8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; import java.util.List; import java.util.Map; @@ -30,9 +29,4 @@ protected InternalGlobal createTestInstance( protected void extraAssertReduced(InternalGlobal reduced, List inputs) { // Nothing extra to assert } - - @Override - protected Class implementationClass() { - return ParsedGlobal.class; - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index 093ccc7181767..9d50b5b65e45b 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -109,7 +109,7 @@ private InternalDateHistogram createTestInstance( } } BucketOrder order = BucketOrder.key(randomBoolean()); - return new InternalDateHistogram(name, buckets, order, minDocCount, 0L, emptyBucketInfo, format, keyed, metadata); + return new InternalDateHistogram(name, buckets, order, minDocCount, 0L, emptyBucketInfo, format, keyed, false, metadata); } @Override @@ -166,11 +166,6 @@ protected void assertReduced(InternalDateHistogram reduced, List implementationClass() { - return ParsedDateHistogram.class; - } - @Override protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) { String name = instance.getName(); @@ -210,7 +205,7 @@ protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) { } default -> throw new AssertionError("Illegal randomisation branch"); } - return new InternalDateHistogram(name, buckets, order, minDocCount, offset, emptyBucketInfo, format, keyed, metadata); + return new InternalDateHistogram(name, buckets, order, minDocCount, offset, emptyBucketInfo, format, keyed, false, metadata); } public void testLargeReduce() { @@ -230,6 +225,7 @@ public void testLargeReduce() { ), DocValueFormat.RAW, false, + false, null ); expectReduceUsesTooManyBuckets(largeHisto, 100000); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index 81e1c7014d9ba..09d309f02c6ee 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -158,11 +158,6 @@ 
protected void assertReduced(InternalHistogram reduced, List assertEquals(expectedCounts, actualCounts); } - @Override - protected Class implementationClass() { - return ParsedHistogram.class; - } - @Override protected InternalHistogram mutateInstance(InternalHistogram instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java index 934104bcae69b..4d0ba751b2f84 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java @@ -92,11 +92,6 @@ protected InternalVariableWidthHistogram createTestInstance( return new InternalVariableWidthHistogram(name, buckets, emptyBucktInfo, numBuckets, format, metaData); } - @Override - protected Class implementationClass() { - return ParsedVariableWidthHistogram.class; - } - @Override protected InternalVariableWidthHistogram mutateInstance(InternalVariableWidthHistogram instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java index bbeeb855f8d18..99be8590e06f2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java @@ -11,10 +11,14 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; import 
org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; @@ -603,7 +607,10 @@ private void testSearchCase( final Consumer verify ) throws IOException { try (Directory directory = newDirectory()) { - try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + IndexWriterConfig config = LuceneTestCase.newIndexWriterConfig(random(), new MockAnalyzer(random())); + // Use LogDocMergePolicy to avoid randomization issues with the doc retrieval order. 
+ config.setMergePolicy(new LogDocMergePolicy()); + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) { indexSampleData(dataset, indexWriter, multipleSegments); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java index b66175450ede0..bcaaefea8eda0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; import java.util.List; import java.util.Map; @@ -30,9 +29,4 @@ protected InternalMissing createTestInstance( protected void extraAssertReduced(InternalMissing reduced, List inputs) { // Nothing extra to assert } - - @Override - protected Class implementationClass() { - return ParsedMissing.class; - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java index b9370d365f734..14a462b9ee7c4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java @@ -10,10 +10,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import 
org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -32,15 +29,4 @@ protected InternalNested createTestInstance( protected void extraAssertReduced(InternalNested reduced, List inputs) { // Nothing extra to assert } - - @Override - protected Class implementationClass() { - return ParsedNested.class; - } - - @Override - protected void assertFromXContent(InternalNested aggregation, ParsedAggregation parsedAggregation) throws IOException { - super.assertFromXContent(aggregation, parsedAggregation); - assertTrue(parsedAggregation instanceof Nested); - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java index 57405379e5432..e2e4ca95bc077 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java @@ -10,10 +10,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -32,15 +29,4 @@ protected InternalReverseNested createTestInstance( protected void extraAssertReduced(InternalReverseNested reduced, List inputs) { // Nothing extra to assert } - - @Override - protected Class implementationClass() { - return ParsedReverseNested.class; - } - - @Override - protected void assertFromXContent(InternalReverseNested aggregation, ParsedAggregation parsedAggregation) throws IOException { - 
super.assertFromXContent(aggregation, parsedAggregation); - assertTrue(parsedAggregation instanceof ReverseNested); - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java index 61f43bf152e0b..eaa2beed52627 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; import org.elasticsearch.test.MapMatcher; @@ -38,12 +37,6 @@ protected InternalIpPrefix createTestInstance(String name, Map m return createTestInstance(name, metadata, aggregations, randomPrefixLength(), randomMinDocCount()); } - @Override - protected Class> implementationClass() { - // Deprecated high level rest client not supported - return null; - } - private int randomPrefixLength() { return between(1, InetAddressPoint.BYTES * 8); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java index 1140411a3b93e..f114041448a32 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import java.util.ArrayList; import java.util.Arrays; @@ -77,11 +76,6 @@ protected InternalBinaryRange createTestInstance( return new InternalBinaryRange(name, format, keyed, buckets, metadata); } - @Override - protected Class implementationClass() { - return ParsedBinaryRange.class; - } - @Override protected void assertReduced(InternalBinaryRange reduced, List inputs) { int pos = 0; @@ -103,11 +97,6 @@ protected Class interna return InternalBinaryRange.Bucket.class; } - @Override - protected Class parsedRangeBucketClass() { - return ParsedBinaryRange.ParsedBucket.class; - } - @Override protected InternalBinaryRange mutateInstance(InternalBinaryRange instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java index e7b9513c2d9fd..d192ff161a2e6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import java.time.ZoneOffset; import java.time.ZonedDateTime; @@ -86,21 +85,11 @@ protected InternalDateRange createTestInstance( return new InternalDateRange(name, buckets, format, keyed, metadata); } - @Override - protected Class implementationClass() { - return ParsedDateRange.class; - } - @Override protected Class internalRangeBucketClass() { return InternalDateRange.Bucket.class; } - 
@Override - protected Class parsedRangeBucketClass() { - return ParsedDateRange.ParsedBucket.class; - } - @Override protected InternalDateRange mutateInstance(InternalDateRange instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java index 3194e343d2082..c9f58de9ccef3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import java.util.ArrayList; import java.util.Collections; @@ -68,21 +67,11 @@ protected InternalGeoDistance createTestInstance( return new InternalGeoDistance(name, buckets, keyed, metadata); } - @Override - protected Class implementationClass() { - return ParsedGeoDistance.class; - } - @Override protected Class internalRangeBucketClass() { return InternalGeoDistance.Bucket.class; } - @Override - protected Class parsedRangeBucketClass() { - return ParsedGeoDistance.ParsedBucket.class; - } - @Override protected InternalGeoDistance mutateInstance(InternalGeoDistance instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java index 38b55a33d5a98..b176add25e597 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java @@ -11,8 +11,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; import java.util.List; @@ -60,28 +58,5 @@ protected void assertReduced(T reduced, List inputs) { assertEquals(expectedCounts, actualCounts); } - @Override - protected final void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) { - super.assertBucket(expected, actual, checkOrder); - - Class internalBucketClass = internalRangeBucketClass(); - assertNotNull("Internal bucket class must not be null", internalBucketClass); - assertTrue(internalBucketClass.isInstance(expected)); - - Class parsedBucketClass = parsedRangeBucketClass(); - assertNotNull("Parsed bucket class must not be null", parsedBucketClass); - assertTrue(parsedBucketClass.isInstance(actual)); - - Range.Bucket expectedRange = (Range.Bucket) expected; - Range.Bucket actualRange = (Range.Bucket) actual; - - assertEquals(expectedRange.getFrom(), actualRange.getFrom()); - assertEquals(expectedRange.getFromAsString(), actualRange.getFromAsString()); - assertEquals(expectedRange.getTo(), actualRange.getTo()); - assertEquals(expectedRange.getToAsString(), actualRange.getToAsString()); - } - protected abstract Class internalRangeBucketClass(); - - protected abstract Class parsedRangeBucketClass(); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java index 
b4003f55693fa..bca19536c5485 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import java.util.ArrayList; import java.util.Collections; @@ -81,21 +80,11 @@ public void setUp() throws Exception { return new InternalRange<>(name, buckets, format, keyed, metadata); } - @Override - protected Class implementationClass() { - return ParsedRange.class; - } - @Override protected Class internalRangeBucketClass() { return InternalRange.Bucket.class; } - @Override - protected Class parsedRangeBucketClass() { - return ParsedRange.ParsedBucket.class; - } - @Override protected InternalRange mutateInstance(InternalRange instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java index e94ccf26d756a..78b175877f4d5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; import java.util.List; import java.util.Map; @@ -29,9 +28,4 @@ protected InternalSampler createTestInstance( protected void extraAssertReduced(InternalSampler reduced, 
List inputs) { // Nothing extra to assert } - - @Override - protected Class implementationClass() { - return ParsedSampler.class; - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java index d0f8c663e2772..cfa67ee2ded62 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java @@ -63,11 +63,6 @@ public class DoubleTermsTests extends InternalTermsTestCase { ); } - @Override - protected Class implementationClass() { - return ParsedDoubleTerms.class; - } - @Override protected InternalTerms mutateInstance(InternalTerms instance) { if (instance instanceof DoubleTerms doubleTerms) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java index aea0a7dccde8b..2ee9fdc62225e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.ChiSquare; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.GND; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.JLHScore; @@ -144,42 +143,6 @@ protected void assertReduced(InternalSignificantTerms reduced, List expectedSigTerms = (InternalSignificantTerms) expected; - 
ParsedSignificantTerms actualSigTerms = (ParsedSignificantTerms) actual; - assertEquals(expectedSigTerms.getSubsetSize(), actualSigTerms.getSubsetSize()); - assertEquals(expectedSigTerms.getSupersetSize(), actualSigTerms.getSupersetSize()); - - for (SignificantTerms.Bucket bucket : (SignificantTerms) expected) { - String key = bucket.getKeyAsString(); - assertBucket(expectedSigTerms.getBucketByKey(key), actualSigTerms.getBucketByKey(key), checkOrder); - } - } - - @Override - protected void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) { - super.assertBucket(expected, actual, checkOrder); - - assertTrue(expected instanceof InternalSignificantTerms.Bucket); - assertTrue(actual instanceof ParsedSignificantTerms.ParsedBucket); - - SignificantTerms.Bucket expectedSigTerm = (SignificantTerms.Bucket) expected; - SignificantTerms.Bucket actualSigTerm = (SignificantTerms.Bucket) actual; - - assertEquals(expectedSigTerm.getSignificanceScore(), actualSigTerm.getSignificanceScore(), 0.0); - assertEquals(expectedSigTerm.getSubsetDf(), actualSigTerm.getSubsetDf()); - assertEquals(expectedSigTerm.getDocCount(), actualSigTerm.getSubsetDf()); - assertEquals(expectedSigTerm.getSupersetDf(), actualSigTerm.getSupersetDf()); - assertEquals(expectedSigTerm.getSubsetSize(), actualSigTerm.getSubsetSize()); - assertEquals(expectedSigTerm.getSupersetSize(), actualSigTerm.getSupersetSize()); - } - private static Map toCounts( Stream buckets, Function fn diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java index a0bd5dc2b30a1..44cfbecbfe1a2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java @@ -42,11 +42,6 @@ public 
class LongRareTermsTests extends InternalRareTermsTestCase { return new LongRareTerms(name, order, metadata, format, buckets, maxDocCount, filter); } - @Override - protected Class implementationClass() { - return ParsedLongRareTerms.class; - } - @Override protected InternalRareTerms mutateInstance(InternalRareTerms instance) { if (instance instanceof LongRareTerms longRareTerms) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java index 5e24ff6edfc42..5675f4f67e6c9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java @@ -63,11 +63,6 @@ public class LongTermsTests extends InternalTermsTestCase { ); } - @Override - protected Class implementationClass() { - return ParsedLongTerms.class; - } - @Override protected InternalTerms mutateInstance(InternalTerms instance) { if (instance instanceof LongTerms longTerms) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index ad7a6c47ef5e4..2d240f74b91a4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -43,9 +43,9 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.InternalAggregation; +import 
org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; @@ -178,7 +178,7 @@ public void testEmbeddedMaxAgg() throws IOException { assertThat(bucket.getKey(), equalTo(1L)); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_max")); assertThat(((Max) (children.asList().get(0))).value(), equalTo(1.0)); @@ -192,7 +192,7 @@ public void testEmbeddedMaxAgg() throws IOException { assertThat(bucket.getKey(), equalTo("1")); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_max")); assertThat(((Max) (children.asList().get(0))).value(), equalTo(1.0)); @@ -292,7 +292,7 @@ public void testNestedTerms() throws IOException { assertThat(bucket.getKey(), equalTo(1L)); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_terms")); assertThat(((Terms) (children.asList().get(0))).getBuckets().size(), equalTo(1)); @@ -308,7 +308,7 @@ public void testNestedTerms() throws IOException { assertThat(bucket.getKey(), equalTo("1")); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), 
equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_terms")); assertThat(((Terms) (children.asList().get(0))).getBuckets().size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java index 8d8832296f3f6..407d8422ccd58 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java @@ -64,11 +64,6 @@ public void setUp() throws Exception { return new SignificantLongTerms(name, requiredSize, 1L, metadata, format, subsetSize, supersetSize, significanceHeuristic, buckets); } - @Override - protected Class implementationClass() { - return ParsedSignificantLongTerms.class; - } - @Override protected InternalSignificantTerms mutateInstance(InternalSignificantTerms instance) { if (instance instanceof SignificantLongTerms longTerms) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java index da009c60e2333..19338356cf29a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java @@ -67,11 +67,6 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas ); } - @Override - protected Class implementationClass() { - return ParsedSignificantStringTerms.class; - } - @Override protected InternalSignificantTerms mutateInstance(InternalSignificantTerms instance) { if (instance instanceof SignificantStringTerms stringTerms) { diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java index 607e3ad2e6550..16200825367b4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java @@ -46,11 +46,6 @@ public class StringRareTermsTests extends InternalRareTermsTestCase { return new StringRareTerms(name, order, metadata, format, buckets, maxDocCount, filter); } - @Override - protected Class implementationClass() { - return ParsedStringRareTerms.class; - } - @Override protected InternalRareTerms mutateInstance(InternalRareTerms instance) { if (instance instanceof StringRareTerms stringRareTerms) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java index 971314012e0f4..c97376724654c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java @@ -45,11 +45,6 @@ public class StringTermsTests extends InternalTermsTestCase { return new BuilderAndToReduce<>(mockBuilder(inputs), inputs); } - @Override - protected Class implementationClass() { - return ParsedStringTerms.class; - } - @Override protected InternalTerms mutateInstance(InternalTerms instance) { if (instance instanceof StringTerms stringTerms) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java index 742ebac25f855..1ae0f596aaac0 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.test.InternalAggregationTestCase; @@ -25,7 +24,6 @@ import java.util.List; import java.util.Map; import java.util.NoSuchElementException; -import java.util.function.Predicate; import java.util.stream.Stream; import static java.util.Collections.emptyMap; @@ -70,19 +68,6 @@ protected abstract T createTestInstance( boolean empty ); - protected abstract Class implementationClass(); - - public void testPercentilesIterators() throws IOException { - final T aggregation = createTestInstance(); - final Iterable parsedAggregation = parseAndAssert(aggregation, false, false); - - Iterator it = aggregation.iterator(); - Iterator parsedIt = parsedAggregation.iterator(); - while (it.hasNext()) { - assertEquals(it.next(), parsedIt.next()); - } - } - public static double[] randomPercents(boolean sorted) { List randomCdfValues = randomSubsetOf(randomIntBetween(1, 7), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d); double[] percents = new double[randomCdfValues.size()]; @@ -95,11 +80,6 @@ public static double[] randomPercents(boolean sorted) { return percents; } - @Override - protected Predicate excludePathsFromXContentInsertion() { - return path -> path.endsWith(CommonFields.VALUES.getPreferredName()); - } - protected abstract void assertPercentile(T agg, Double value); public void testEmptyRanksXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java index 83e27a7c11d03..fa49ac2abfcb7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -91,16 +90,6 @@ private void verifyAvgOfDoubles(double[] values, double expected, double delta) assertEquals(expected, reduced.getValue(), delta); } - @Override - protected void assertFromXContent(InternalAvg avg, ParsedAggregation parsedAggregation) { - ParsedAvg parsed = ((ParsedAvg) parsedAggregation); - assertEquals(avg.getValue(), parsed.getValue(), Double.MIN_VALUE); - // we don't print out VALUE_AS_STRING for avg.getCount() == 0, so we cannot get the exact same value back - if (avg.getCount() != 0) { - assertEquals(avg.getValueAsString(), parsed.getValueAsString()); - } - } - @Override protected InternalAvg mutateInstance(InternalAvg instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java index 940cf37afdf75..65e34160dad64 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import 
org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; import org.junit.After; @@ -86,15 +85,6 @@ protected void assertReduced(InternalCardinality reduced, List 0 ? aggregation.getSumOfSquares() : 0, parsed.getSumOfSquares(), 0); - assertEquals(count > 0 ? aggregation.getVariance() : 0, parsed.getVariance(), 0); - assertEquals(count > 0 ? aggregation.getVariancePopulation() : 0, parsed.getVariancePopulation(), 0); - assertEquals(count > 0 ? aggregation.getVarianceSampling() : 0, parsed.getVarianceSampling(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviation() : 0, parsed.getStdDeviation(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationPopulation() : 0, parsed.getStdDeviationPopulation(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationSampling() : 0, parsed.getStdDeviationSampling(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER) : 0, parsed.getStdDeviationBound(Bounds.LOWER), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER) : 0, parsed.getStdDeviationBound(Bounds.UPPER), 0); - assertEquals( - count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER_POPULATION) : 0, - parsed.getStdDeviationBound(Bounds.LOWER_POPULATION), - 0 - ); - assertEquals( - count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER_POPULATION) : 0, - parsed.getStdDeviationBound(Bounds.UPPER_POPULATION), - 0 - ); - assertEquals( - count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER_SAMPLING) : 0, - parsed.getStdDeviationBound(Bounds.LOWER_SAMPLING), - 0 - ); - assertEquals( - count > 0 ? 
aggregation.getStdDeviationBound(Bounds.UPPER_SAMPLING) : 0, - parsed.getStdDeviationBound(Bounds.UPPER_SAMPLING), - 0 - ); - // also as_string values are only rendered for count != 0 - if (count > 0) { - assertEquals(aggregation.getSumOfSquaresAsString(), parsed.getSumOfSquaresAsString()); - assertEquals(aggregation.getVarianceAsString(), parsed.getVarianceAsString()); - assertEquals(aggregation.getVariancePopulationAsString(), parsed.getVariancePopulationAsString()); - assertEquals(aggregation.getVarianceSamplingAsString(), parsed.getVarianceSamplingAsString()); - assertEquals(aggregation.getStdDeviationAsString(), parsed.getStdDeviationAsString()); - assertEquals(aggregation.getStdDeviationPopulationAsString(), parsed.getStdDeviationPopulationAsString()); - assertEquals(aggregation.getStdDeviationSamplingAsString(), parsed.getStdDeviationSamplingAsString()); - assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.LOWER), parsed.getStdDeviationBoundAsString(Bounds.LOWER)); - assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.UPPER), parsed.getStdDeviationBoundAsString(Bounds.UPPER)); - assertEquals( - aggregation.getStdDeviationBoundAsString(Bounds.LOWER_POPULATION), - parsed.getStdDeviationBoundAsString(Bounds.LOWER_POPULATION) - ); - assertEquals( - aggregation.getStdDeviationBoundAsString(Bounds.UPPER_POPULATION), - parsed.getStdDeviationBoundAsString(Bounds.UPPER_POPULATION) - ); - assertEquals( - aggregation.getStdDeviationBoundAsString(Bounds.LOWER_SAMPLING), - parsed.getStdDeviationBoundAsString(Bounds.LOWER_SAMPLING) - ); - assertEquals( - aggregation.getStdDeviationBoundAsString(Bounds.UPPER_SAMPLING), - parsed.getStdDeviationBoundAsString(Bounds.UPPER_SAMPLING) - ); - } - } - @Override protected InternalExtendedStats mutateInstance(InternalExtendedStats instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java index 72622c8e0dd3d..129ecdb9b9f59 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -99,15 +98,6 @@ protected void assertSampled(InternalGeoBounds sampled, InternalGeoBounds reduce assertValueClose(sampled.negRight, reduced.negRight); } - @Override - protected void assertFromXContent(InternalGeoBounds aggregation, ParsedAggregation parsedAggregation) { - assertTrue(parsedAggregation instanceof ParsedGeoBounds); - ParsedGeoBounds parsed = (ParsedGeoBounds) parsedAggregation; - - assertEquals(aggregation.topLeft(), parsed.topLeft()); - assertEquals(aggregation.bottomRight(), parsed.bottomRight()); - } - @Override protected InternalGeoBounds mutateInstance(InternalGeoBounds instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java index 7cc7209ca0ea8..2afa3b2f86cf9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.ParsedAggregation; import 
org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; @@ -68,9 +67,11 @@ protected boolean supportsSampling() { @Override protected void assertSampled(InternalGeoCentroid sampled, InternalGeoCentroid reduced, SamplingContext samplingContext) { - assertEquals(sampled.centroid().getY(), reduced.centroid().getY(), 1e-12); - assertEquals(sampled.centroid().getX(), reduced.centroid().getX(), 1e-12); assertEquals(sampled.count(), samplingContext.scaleUp(reduced.count()), 0); + if (sampled.count() > 0) { + assertEquals(sampled.centroid().getY(), reduced.centroid().getY(), 1e-12); + assertEquals(sampled.centroid().getX(), reduced.centroid().getX(), 1e-12); + } } public void testReduceMaxCount() { @@ -84,15 +85,6 @@ public void testReduceMaxCount() { assertThat(reducedGeoCentroid.count(), equalTo(Long.MAX_VALUE)); } - @Override - protected void assertFromXContent(InternalGeoCentroid aggregation, ParsedAggregation parsedAggregation) { - assertTrue(parsedAggregation instanceof ParsedGeoCentroid); - ParsedGeoCentroid parsed = (ParsedGeoCentroid) parsedAggregation; - - assertEquals(aggregation.centroid(), parsed.centroid()); - assertEquals(aggregation.count(), parsed.count()); - } - @Override protected InternalGeoCentroid mutateInstance(InternalGeoCentroid instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java index ac43929878f47..ecd120ef849b2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java @@ -67,11 +67,6 @@ protected void assertSampled(InternalHDRPercentileRanks 
sampled, InternalHDRPerc } } - @Override - protected Class implementationClass() { - return ParsedHDRPercentileRanks.class; - } - @Override protected InternalHDRPercentileRanks mutateInstance(InternalHDRPercentileRanks instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java index e5580f7cdca68..d4b0eb5c67646 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java @@ -69,11 +69,6 @@ protected void assertSampled(InternalHDRPercentiles sampled, InternalHDRPercenti } } - @Override - protected Class implementationClass() { - return ParsedHDRPercentiles.class; - } - public void testIterator() { final double[] percents = randomPercents(false); final double[] values = new double[frequently() ? 
randomIntBetween(1, 10) : 0]; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java index 190ab9914d933..59834d056557c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java @@ -9,11 +9,9 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; -import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -64,14 +62,6 @@ protected void assertSampled( assertThat(sampled.getMedianAbsoluteDeviation(), equalTo(reduced.getMedianAbsoluteDeviation())); } - @Override - protected void assertFromXContent(InternalMedianAbsoluteDeviation internalMAD, ParsedAggregation parsedAggregation) throws IOException { - assertTrue(parsedAggregation instanceof ParsedMedianAbsoluteDeviation); - ParsedMedianAbsoluteDeviation parsedMAD = (ParsedMedianAbsoluteDeviation) parsedAggregation; - // Double.compare handles NaN, which we use for no result - assertEquals(internalMAD.getMedianAbsoluteDeviation(), parsedMAD.getMedianAbsoluteDeviation(), 0); - } - @Override protected InternalMedianAbsoluteDeviation mutateInstance(InternalMedianAbsoluteDeviation instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java index 86b261ac0eb7f..8051fbe68811f 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java @@ -9,28 +9,12 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import static org.hamcrest.Matchers.equalTo; public abstract class InternalPercentilesRanksTestCase extends AbstractPercentilesTestCase< T> { - @Override - protected final void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) { - assertTrue(parsedAggregation instanceof PercentileRanks); - PercentileRanks parsedPercentileRanks = (PercentileRanks) parsedAggregation; - - for (Percentile percentile : aggregation) { - Double value = percentile.value(); - assertEquals(aggregation.percent(value), parsedPercentileRanks.percent(value), 0); - assertEquals(aggregation.percentAsString(value), parsedPercentileRanks.percentAsString(value)); - } - - Class parsedClass = implementationClass(); - assertTrue(parsedClass != null && parsedClass.isInstance(parsedAggregation)); - } - @Override protected void assertPercentile(T agg, Double value) { assertThat(agg.percent(value), equalTo(Double.NaN)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesTestCase.java index 50edd9212c192..b46cfb5f5ede4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesTestCase.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.InternalAggregation; -import 
org.elasticsearch.search.aggregations.ParsedAggregation; import java.util.List; @@ -17,21 +16,6 @@ public abstract class InternalPercentilesTestCase extends AbstractPercentilesTestCase { - @Override - protected final void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) { - assertTrue(parsedAggregation instanceof Percentiles); - Percentiles parsedPercentiles = (Percentiles) parsedAggregation; - - for (Percentile percentile : aggregation) { - Double percent = percentile.percent(); - assertEquals(aggregation.percentile(percent), parsedPercentiles.percentile(percent), 0); - assertEquals(aggregation.percentileAsString(percent), parsedPercentiles.percentileAsString(percent)); - } - - Class parsedClass = implementationClass(); - assertTrue(parsedClass != null && parsedClass.isInstance(parsedAggregation)); - } - public static double[] randomPercents() { List randomCdfValues = randomSubsetOf(randomIntBetween(1, 7), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d); double[] percents = new double[randomCdfValues.size()]; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java index 7b9c79dfa6448..92ff5c7d97fe9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java @@ -17,10 +17,8 @@ import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.ParsedAggregation; import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -31,7 +29,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import java.util.function.Supplier; import static java.util.Collections.singletonList; @@ -183,14 +180,6 @@ public InternalScriptedMetric createTestInstanceForXContent() { ); } - @Override - protected void assertFromXContent(InternalScriptedMetric aggregation, ParsedAggregation parsedAggregation) { - assertTrue(parsedAggregation instanceof ParsedScriptedMetric); - ParsedScriptedMetric parsed = (ParsedScriptedMetric) parsedAggregation; - - assertValues(aggregation.aggregation(), parsed.aggregation()); - } - private static void assertValues(Object expected, Object actual) { if (expected instanceof Long) { // longs that fit into the integer range are parsed back as integer @@ -236,11 +225,6 @@ private static void assertValues(Object expected, Object actual) { } } - @Override - protected Predicate excludePathsFromXContentInsertion() { - return path -> path.contains(CommonFields.VALUE.getPreferredName()); - } - @Override protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java index 40550e76fa188..575b168050551 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java @@ -9,9 +9,7 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.DocValueFormat; -import 
org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.pipeline.InternalStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; import java.util.List; import java.util.Map; @@ -40,10 +38,4 @@ public void testReduceRandom() { protected void assertReduced(InternalStats reduced, List inputs) { // no test since reduce operation is unsupported } - - @Override - protected void assertFromXContent(InternalStats aggregation, ParsedAggregation parsedAggregation) { - super.assertFromXContent(aggregation, parsedAggregation); - assertTrue(parsedAggregation instanceof ParsedStatsBucket); - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java index a0c50ffd6b2e6..9476a0830994f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xcontent.ToXContent; @@ -135,30 +134,6 @@ private void verifyStatsOfDoubles(double[] values, double expectedSum, double ex assertEquals(max, reduced.getMax(), 0d); } - @Override - protected void assertFromXContent(InternalStats aggregation, ParsedAggregation parsedAggregation) { - assertTrue(parsedAggregation instanceof ParsedStats); - ParsedStats parsed = (ParsedStats) parsedAggregation; - assertStats(aggregation, parsed); - } - - static void assertStats(InternalStats aggregation, 
ParsedStats parsed) { - long count = aggregation.getCount(); - assertEquals(count, parsed.getCount()); - // for count == 0, fields are rendered as `null`, so we test that we parse to default values used also in the reduce phase - assertEquals(count > 0 ? aggregation.getMin() : Double.POSITIVE_INFINITY, parsed.getMin(), 0); - assertEquals(count > 0 ? aggregation.getMax() : Double.NEGATIVE_INFINITY, parsed.getMax(), 0); - assertEquals(count > 0 ? aggregation.getSum() : 0, parsed.getSum(), 0); - assertEquals(count > 0 ? aggregation.getAvg() : 0, parsed.getAvg(), 0); - // also as_string values are only rendered for count != 0 - if (count > 0) { - assertEquals(aggregation.getMinAsString(), parsed.getMinAsString()); - assertEquals(aggregation.getMaxAsString(), parsed.getMaxAsString()); - assertEquals(aggregation.getSumAsString(), parsed.getSumAsString()); - assertEquals(aggregation.getAvgAsString(), parsed.getAvgAsString()); - } - } - @Override protected InternalStats mutateInstance(InternalStats instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java index 07869acbe50ad..932b5feab3bb8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java @@ -82,11 +82,6 @@ protected void assertSampled( } } - @Override - protected Class implementationClass() { - return ParsedTDigestPercentileRanks.class; - } - @Override protected InternalTDigestPercentileRanks mutateInstance(InternalTDigestPercentileRanks instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java index 4927ce9be3abb..0e6876f1e34d6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java @@ -74,11 +74,6 @@ protected void assertSampled(InternalTDigestPercentiles sampled, InternalTDigest } } - @Override - protected Class implementationClass() { - return ParsedTDigestPercentiles.class; - } - @Override protected InternalTDigestPercentiles mutateInstance(InternalTDigestPercentiles instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 7d3799b2db35d..717d80eeabc90 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.test.ESTestCase; @@ -42,7 +41,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -159,13 +157,13 @@ private InternalTopHits createTestInstance( Map searchHitFields = new HashMap<>(); scoreDocs[i] = docBuilder.apply(docId, score); - hits[i] = new SearchHit(docId, Integer.toString(i)); + hits[i] = SearchHit.unpooled(docId, Integer.toString(i)); 
hits[i].addDocumentFields(searchHitFields, Collections.emptyMap()); hits[i].score(score); } int totalHits = between(actualSize, 500000); sort(hits, scoreDocs, comparator); - SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); TopDocs topDocs = topDocsBuilder.apply(new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), scoreDocs); // Lucene's TopDocs initializes the maxScore to Float.NaN, if there is no maxScore @@ -189,35 +187,6 @@ private void sort(SearchHit[] searchHits, ScoreDoc[] scoreDocs, Comparator expectedHits = Arrays.asList(expectedSearchHits.getHits()); - List actualHits = Arrays.asList(actualSearchHits.getHits()); - - assertEquals(expectedHits.size(), actualHits.size()); - for (int i = 0; i < expectedHits.size(); i++) { - SearchHit expected = expectedHits.get(i); - SearchHit actual = actualHits.get(i); - - assertEquals(expected.getIndex(), actual.getIndex()); - assertEquals(expected.getId(), actual.getId()); - assertEquals(expected.getVersion(), actual.getVersion()); - assertEquals(expected.getScore(), actual.getScore(), 0.0f); - assertEquals(expected.getFields(), actual.getFields()); - assertEquals(expected.getSourceAsMap(), actual.getSourceAsMap()); - } - } - private static Object randomOfType(SortField.Type type) { return switch (type) { case CUSTOM -> throw new UnsupportedOperationException(); @@ -276,16 +245,20 @@ protected void assertReduced(InternalTopHits reduced, List inpu new TotalHits(totalHits, relation), maxScore == Float.NEGATIVE_INFINITY ? 
Float.NaN : maxScore ); - assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits); + try { + assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits); + } finally { + expectedHits.decRef(); + } } public void testGetProperty() { // Create a SearchHit containing: { "foo": 1000.0 } and use it to initialize an InternalTopHits instance. - SearchHit hit = new SearchHit(0); + SearchHit hit = SearchHit.unpooled(0); hit = hit.sourceRef(Source.fromMap(Map.of("foo", 1000.0), XContentType.YAML).internalSourceRef()); hit.sortValues(new Object[] { 10.0 }, new DocValueFormat[] { DocValueFormat.RAW }); hit.score(1.0f); - SearchHits hits = new SearchHits(new SearchHit[] { hit }, null, 0); + SearchHits hits = SearchHits.unpooled(new SearchHit[] { hit }, null, 0); InternalTopHits internalTopHits = new InternalTopHits("test", 0, 0, null, hits, null); assertEquals(internalTopHits, internalTopHits.getProperty(Collections.emptyList())); @@ -301,7 +274,7 @@ public void testGetProperty() { expectThrows(IllegalArgumentException.class, () -> internalTopHits.getProperty(List.of("_sort"))); // Two SearchHit instances are not allowed, only the first will be used without assertion. 
- hits = new SearchHits(new SearchHit[] { hit, hit }, null, 0); + hits = SearchHits.unpooled(new SearchHit[] { hit, hit }, null, 0); InternalTopHits internalTopHits3 = new InternalTopHits("test", 0, 0, null, hits, null); expectThrows(IllegalArgumentException.class, () -> internalTopHits3.getProperty(List.of("foo"))); } @@ -397,7 +370,7 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { int from = instance.getFrom(); int size = instance.getSize(); TopDocsAndMaxScore topDocs = instance.getTopDocs(); - SearchHits searchHits = instance.getHits(); + SearchHits searchHits = instance.getHits().asUnpooled(); Map metadata = instance.getMetadata(); switch (between(0, 5)) { case 0 -> name += randomAlphaOfLength(5); @@ -415,7 +388,7 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { searchHits.getTotalHits().value + between(1, 100), randomFrom(TotalHits.Relation.values()) ); - searchHits = new SearchHits(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); + searchHits = SearchHits.unpooled(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); } case 5 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java index df7ae71bdfd0e..75a43717a9929 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -41,12 +40,6 @@ protected void 
assertSampled(InternalValueCount sampled, InternalValueCount redu assertThat(sampled.getValue(), equalTo(samplingContext.scaleUp(reduced.getValue()))); } - @Override - protected void assertFromXContent(InternalValueCount valueCount, ParsedAggregation parsedAggregation) { - assertEquals(valueCount.getValue(), ((ParsedValueCount) parsedAggregation).getValue(), 0); - assertEquals(valueCount.getValueAsString(), ((ParsedValueCount) parsedAggregation).getValueAsString()); - } - @Override protected InternalValueCount mutateInstance(InternalValueCount instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java index 270648e3b0795..db0edee108f20 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -57,16 +56,6 @@ protected void assertSampled(InternalWeightedAvg sampled, InternalWeightedAvg re assertThat(sampled.getValue(), equalTo(reduced.getValue())); } - @Override - protected void assertFromXContent(InternalWeightedAvg avg, ParsedAggregation parsedAggregation) { - ParsedWeightedAvg parsed = ((ParsedWeightedAvg) parsedAggregation); - assertEquals(avg.getValue(), parsed.getValue(), Double.MIN_VALUE); - // we don't print out VALUE_AS_STRING for avg.getCount() == 0, so we cannot get the exact same value back - if (avg.getWeight() != 0) { - assertEquals(avg.getValueAsString(), parsed.getValueAsString()); - } - } - @Override 
protected InternalWeightedAvg mutateInstance(InternalWeightedAvg instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java index 6dd7e7b259b3f..a72ccb948a9b4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -44,19 +43,6 @@ protected void assertSampled(Max sampled, Max reduced, SamplingContext samplingC assertThat(sampled.value(), equalTo(reduced.value())); } - @Override - protected void assertFromXContent(Max max, ParsedAggregation parsedAggregation) { - ParsedMax parsed = ((ParsedMax) parsedAggregation); - if (Double.isInfinite(max.value()) == false) { - assertEquals(max.value(), parsed.value(), Double.MIN_VALUE); - assertEquals(max.getValueAsString(), parsed.getValueAsString()); - } else { - // we write Double.NEGATIVE_INFINITY and Double.POSITIVE_INFINITY to xContent as 'null', so we - // cannot differentiate between them. 
Also we cannot recreate the exact String representation - assertEquals(parsed.value(), Double.NEGATIVE_INFINITY, 0); - } - } - @Override protected Max mutateInstance(Max instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java index d337d285336b1..a80476b545bc6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -43,19 +42,6 @@ protected void assertSampled(Min sampled, Min reduced, SamplingContext samplingC assertThat(sampled.value(), equalTo(reduced.value())); } - @Override - protected void assertFromXContent(Min min, ParsedAggregation parsedAggregation) { - ParsedMin parsed = ((ParsedMin) parsedAggregation); - if (Double.isInfinite(min.value()) == false) { - assertEquals(min.value(), parsed.value(), Double.MIN_VALUE); - assertEquals(min.getValueAsString(), parsed.getValueAsString()); - } else { - // we write Double.NEGATIVE_INFINITY and Double.POSITIVE_INFINITY to xContent as 'null', so we - // cannot differentiate between them. 
Also we cannot recreate the exact String representation - assertEquals(parsed.value(), Double.POSITIVE_INFINITY, 0); - } - } - @Override protected Min mutateInstance(Min instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java index b6e4c15f5c029..c8d3e944a3ddf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; @@ -84,13 +83,6 @@ private void verifySummationOfDoubles(double[] values, double expected, double d assertEquals(expected, reduced.value(), delta); } - @Override - protected void assertFromXContent(Sum sum, ParsedAggregation parsedAggregation) { - ParsedSum parsed = ((ParsedSum) parsedAggregation); - assertEquals(sum.value(), parsed.value(), Double.MIN_VALUE); - assertEquals(sum.getValueAsString(), parsed.getValueAsString()); - } - @Override protected Sum mutateInstance(Sum instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java index f242e19012a35..0fe660e56822c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java @@ -194,7 +194,7 @@ public void testSerialization() throws 
IOException { backwardsCompatible.add(i); } - TDigestState serialized = writeToAndReadFrom(state, TransportVersions.V_8_500_020); + TDigestState serialized = writeToAndReadFrom(state, TransportVersions.V_8_9_X); assertEquals(serialized, state); TDigestState serializedBackwardsCompatible = writeToAndReadFrom(state, TransportVersions.V_8_8_1); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index 05fcb45c71ee9..8e6d9b5788c54 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -25,10 +25,9 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; @@ -119,12 +118,12 @@ public void testSameAggNames() throws IOException { // Finally, reduce the pipeline agg PipelineAggregator avgBucketAgg = avgBucketBuilder.createInternal(Collections.emptyMap()); - List reducedAggs = new ArrayList<>(2); + List reducedAggs = new ArrayList<>(2); // Histo has to go first to exercise the bug reducedAggs.add(histogramResult); reducedAggs.add(avgResult); - Aggregations aggregations = new 
Aggregations(reducedAggs); + InternalAggregations aggregations = InternalAggregations.from(reducedAggs); InternalAggregation pipelineResult = ((AvgBucketPipelineAggregator) avgBucketAgg).doReduce(aggregations, null); assertNotNull(pipelineResult); } @@ -174,10 +173,10 @@ public void testComplicatedBucketPath() throws IOException { // Finally, reduce the pipeline agg PipelineAggregator avgBucketAgg = avgBucketBuilder.createInternal(Collections.emptyMap()); - List reducedAggs = new ArrayList<>(4); + List reducedAggs = new ArrayList<>(4); reducedAggs.add(filterResult); - Aggregations aggregations = new Aggregations(reducedAggs); + InternalAggregations aggregations = InternalAggregations.from(reducedAggs); InternalAggregation pipelineResult = ((AvgBucketPipelineAggregator) avgBucketAgg).doReduce(aggregations, null); assertNotNull(pipelineResult); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java index d4c004a6e776f..5e1607298a0bb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; @@ -76,7 +76,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return null; } @@ -156,7 +156,7 @@ public long getDocCount() { } 
@Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return null; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java index 2eb5f3a45b4ac..87ad90e1de160 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; import java.util.Arrays; @@ -42,20 +41,6 @@ protected void assertReduced(InternalBucketMetricValue reduced, List inputs) { // no test since reduce operation is unsupported } - - @Override - protected void assertFromXContent(InternalExtendedStats aggregation, ParsedAggregation parsedAggregation) { - super.assertFromXContent(aggregation, parsedAggregation); - assertTrue(parsedAggregation instanceof ParsedExtendedStatsBucket); - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java index 98f957241629e..99163fe821619 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java @@ -11,8 +11,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation.CommonFields; -import 
org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xcontent.ToXContent; @@ -27,7 +25,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import static org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase.randomPercents; import static org.hamcrest.Matchers.equalTo; @@ -73,22 +70,6 @@ protected void assertReduced(InternalPercentilesBucket reduced, List parsedAggregation = parseAndAssert(aggregation, false, false); - Iterator it = aggregation.iterator(); - Iterator parsedIt = parsedAggregation.iterator(); - while (it.hasNext()) { - assertEquals(it.next(), parsedIt.next()); - } - } - public void testEmptyRanksXContent() throws IOException { double[] percents = new double[] { 1, 2, 3 }; double[] percentiles = new double[3]; @@ -183,11 +154,6 @@ public void testEmptyRanksXContent() throws IOException { assertThat(Strings.toString(builder), equalTo(expected)); } - @Override - protected Predicate excludePathsFromXContentInsertion() { - return path -> path.endsWith(CommonFields.VALUES.getPreferredName()); - } - @Override protected InternalPercentilesBucket mutateInstance(InternalPercentilesBucket instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java index 261924aaf45be..4650ac627c962 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; -import 
org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; import java.util.HashMap; @@ -38,19 +37,6 @@ protected void assertReduced(InternalSimpleValue reduced, List builder.build(searchExecutionContext)); assertEquals( exc.getMessage(), @@ -194,7 +194,7 @@ public void testBuild() throws IOException { keywordFieldType = new KeywordFieldMapper.KeywordFieldType("field", false, true, Collections.emptyMap()); when(searchExecutionContext.getFieldType("field")).thenReturn(keywordFieldType); - kbuilder.setInnerHits(new InnerHitBuilder()); + kbuilder.setInnerHits(new InnerHitBuilder().setName("field")); exc = expectThrows(IllegalArgumentException.class, () -> builder.build(searchExecutionContext)); assertEquals( exc.getMessage(), diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java index 4c8484be200e5..f8af8a2e3109b 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java @@ -81,6 +81,7 @@ protected SearchHit nextDoc(int doc) { assertThat(hits.length, equalTo(docs.length)); for (int i = 0; i < hits.length; i++) { assertThat(hits[i].docId(), equalTo(docs[i])); + hits[i].decRef(); } reader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java index e0a26fbc67ffd..a5371e7b0b00a 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java @@ -82,7 +82,7 @@ public void testDocValueFetcher() throws IOException { for (LeafReaderContext context : reader.leaves()) { 
processor.setNextReader(context); for (int doc = 0; doc < context.reader().maxDoc(); doc++) { - SearchHit searchHit = new SearchHit(doc + context.docBase); + SearchHit searchHit = SearchHit.unpooled(doc + context.docBase); processor.process(new FetchSubPhase.HitContext(searchHit, context, doc, Map.of(), Source.empty(null))); assertNotNull(searchHit.getFields().get("field")); } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 620706a01c88f..3a4d67ae281f2 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -170,7 +170,7 @@ private HitContext hitExecuteMultiple( when(sec.isSourceEnabled()).thenReturn(sourceBuilder != null); when(fetchContext.getSearchExecutionContext()).thenReturn(sec); - final SearchHit searchHit = new SearchHit(1, null, nestedIdentity); + final SearchHit searchHit = SearchHit.unpooled(1, null, nestedIdentity); // We don't need a real index, just a LeafReaderContext which cannot be mocked. 
MemoryIndex index = new MemoryIndex(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java index 7a1751dbd41fc..be36d72304bd0 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java @@ -1169,7 +1169,7 @@ public void testNestedGrouping() throws IOException { """; var results = fetchFields(mapperService, source, fieldAndFormatList("*", null, false)); - SearchHit searchHit = new SearchHit(0); + SearchHit searchHit = SearchHit.unpooled(0); searchHit.addDocumentFields(results, Map.of()); assertThat(Strings.toString(searchHit), containsString("\"ml.top_classes\":")); } diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java index 9d02b7b5f4dcd..8b9869e8744a9 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java @@ -77,8 +77,8 @@ protected GenericWriteableWrapper copyInstance(GenericWriteableWrapper instance, } public void testSerializationFailsWithOlderVersion() { - TransportVersion older = TransportVersions.SEARCH_APP_INDICES_REMOVED; - assert older.before(TransportVersions.GENERIC_NAMED_WRITABLE_ADDED); + TransportVersion older = TransportVersions.V_8_10_X; + assert older.before(TransportVersions.V_8_11_X); final var testInstance = createTestInstance().geoBoundingBox(); try (var output = new BytesStreamOutput()) { output.setTransportVersion(older); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java index 
ed92bdb1e5919..b16e8f68c7e32 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java @@ -237,7 +237,7 @@ public void testChannelVersion() throws Exception { version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_8_0, TransportVersion.current()); } if (request.source() != null && request.source().subSearches().size() >= 2) { - version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_500_020, TransportVersion.current()); + version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_9_X, TransportVersion.current()); } request = copyWriteable(request, namedWriteableRegistry, ShardSearchRequest::new, version); channelVersion = TransportVersion.min(channelVersion, version); diff --git a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java index 516ffeb9418bd..949f4b9e0677b 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregationsTests; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; @@ -115,8 +115,8 @@ public void testSerialization() throws Exception { assertEquals(querySearchResult.hasAggs(), deserialized.hasAggs()); if (deserialized.hasAggs()) { assertThat(deserialized.aggregations().isSerialized(), is(delayed)); - 
Aggregations aggs = querySearchResult.consumeAggs(); - Aggregations deserializedAggs = deserialized.consumeAggs(); + InternalAggregations aggs = querySearchResult.consumeAggs(); + InternalAggregations deserializedAggs = deserialized.consumeAggs(); assertEquals(aggs.asList(), deserializedAggs.asList()); assertThat(deserialized.aggregations(), is(nullValue())); } diff --git a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java index f42ca49dc14b9..7aece1476a99d 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java +++ b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java @@ -67,7 +67,7 @@ public ThrowingQueryBuilder(StreamInput in) throws IOException { this.randomUID = in.readLong(); this.failure = in.readException(); this.shardId = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.index = in.readOptionalString(); } else { this.index = null; @@ -79,7 +79,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeLong(randomUID); out.writeException(failure); out.writeVInt(shardId); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeOptionalString(index); } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java index 8a82ae8ce7268..42fe65c8d14ef 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java @@ -54,6 +54,9 @@ public static Option createTestItem() { } Option option = new 
CompletionSuggestion.Entry.Option(docId, text, score, contexts); option.setHit(hit); + if (hit != null) { + hit.decRef(); + } return option; } diff --git a/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java new file mode 100644 index 0000000000000..02093d9fa0e44 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.vectors; + +import org.apache.lucene.queries.function.FunctionQuery; +import org.apache.lucene.queries.function.valuesource.FloatVectorSimilarityFunction; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Locale; + +public class ExactKnnQueryBuilderTests extends AbstractQueryTestCase { + + private static final String VECTOR_FIELD = "vector"; + private static final int 
VECTOR_DIMENSION = 3; + + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject(VECTOR_FIELD) + .field("type", "dense_vector") + .field("dims", VECTOR_DIMENSION) + .field("index", true) + .field("similarity", "cosine") + .endObject() + .endObject() + .endObject(); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(builder)), + MapperService.MergeReason.MAPPING_UPDATE + ); + } + + @Override + protected Collection> getPlugins() { + return List.of(TestGeoShapeFieldMapperPlugin.class); + } + + @Override + protected ExactKnnQueryBuilder doCreateTestQueryBuilder() { + float[] query = new float[VECTOR_DIMENSION]; + for (int i = 0; i < VECTOR_DIMENSION; i++) { + query[i] = randomFloat(); + } + return new ExactKnnQueryBuilder(query, VECTOR_FIELD); + } + + @Override + public void testValidOutput() { + ExactKnnQueryBuilder query = new ExactKnnQueryBuilder(new float[] { 1.0f, 2.0f, 3.0f }, "field"); + String expected = """ + { + "exact_knn" : { + "query" : [ + 1.0, + 2.0, + 3.0 + ], + "field" : "field" + } + }"""; + assertEquals(expected, query.toString()); + } + + @Override + protected void doAssertLuceneQuery(ExactKnnQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException { + assertTrue(query instanceof BooleanQuery); + BooleanQuery booleanQuery = (BooleanQuery) query; + boolean foundFunction = false; + for (BooleanClause clause : booleanQuery) { + if (clause.getQuery() instanceof FunctionQuery functionQuery) { + foundFunction = true; + assertTrue(functionQuery.getValueSource() instanceof FloatVectorSimilarityFunction); + String description = functionQuery.getValueSource().description().toLowerCase(Locale.ROOT); + if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.NORMALIZED_VECTOR_COSINE)) { + 
assertTrue(description, description.contains("dot_product")); + } else { + assertTrue(description, description.contains("cosine")); + } + } + } + assertTrue("Unable to find FloatVectorSimilarityFunction in created BooleanQuery", foundFunction); + } + + @Override + public void testUnknownObjectException() { + // Test isn't relevant, since query is never parsed from xContent + } + + @Override + public void testFromXContent() throws IOException { + // Test isn't relevant, since query is never parsed from xContent + } + + @Override + public void testUnknownField() { + // Test isn't relevant, since query is never parsed from xContent + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index eceafe6d12ac9..67bc6bde9c1af 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -23,8 +23,10 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.index.query.InnerHitsRewriteContext; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; @@ -38,6 +40,7 @@ import java.util.List; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.elasticsearch.search.vectors.KnnSearchBuilderTests.randomVector; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; @@ -58,12 +61,20 @@ protected KnnScoreDocQueryBuilder 
doCreateTestQueryBuilder() { for (int doc = 0; doc < numDocs; doc++) { scoreDocs.add(new ScoreDoc(doc, randomFloat())); } - return new KnnScoreDocQueryBuilder(scoreDocs.toArray(new ScoreDoc[0])); + return new KnnScoreDocQueryBuilder( + scoreDocs.toArray(new ScoreDoc[0]), + randomBoolean() ? "field" : null, + randomBoolean() ? randomVector(10) : null + ); } @Override public void testValidOutput() { - KnnScoreDocQueryBuilder query = new KnnScoreDocQueryBuilder(new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }); + KnnScoreDocQueryBuilder query = new KnnScoreDocQueryBuilder( + new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }, + "field", + new float[] { 1.0f, 2.0f } + ); String expected = """ { "knn_score_doc" : { @@ -76,6 +87,11 @@ public void testValidOutput() { "doc" : 5, "score" : 1.6 } + ], + "field" : "field", + "query" : [ + 1.0, + 2.0 ] } }"""; @@ -144,11 +160,36 @@ public void testMustRewrite() throws IOException { } public void testRewriteToMatchNone() throws IOException { - KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(new ScoreDoc[0]); - SearchExecutionContext context = createSearchExecutionContext(); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( + new ScoreDoc[0], + randomBoolean() ? "field" : null, + randomBoolean() ? randomVector(10) : null + ); + QueryRewriteContext context = randomBoolean() + ? 
new InnerHitsRewriteContext(createSearchExecutionContext().getParserConfig(), System::currentTimeMillis) + : createSearchExecutionContext(); assertEquals(new MatchNoneQueryBuilder(), queryBuilder.rewrite(context)); } + public void testRewriteForInnerHits() throws IOException { + SearchExecutionContext context = createSearchExecutionContext(); + InnerHitsRewriteContext innerHitsRewriteContext = new InnerHitsRewriteContext(context.getParserConfig(), System::currentTimeMillis); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( + new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }, + randomAlphaOfLength(10), + randomVector(10) + ); + queryBuilder.boost(randomFloat()); + queryBuilder.queryName(randomAlphaOfLength(10)); + QueryBuilder rewritten = queryBuilder.rewrite(innerHitsRewriteContext); + assertTrue(rewritten instanceof ExactKnnQueryBuilder); + ExactKnnQueryBuilder exactKnnQueryBuilder = (ExactKnnQueryBuilder) rewritten; + assertEquals(queryBuilder.queryVector(), exactKnnQueryBuilder.getQuery()); + assertEquals(queryBuilder.fieldName(), exactKnnQueryBuilder.getField()); + assertEquals(queryBuilder.boost(), exactKnnQueryBuilder.boost(), 0.0001f); + assertEquals(queryBuilder.queryName(), exactKnnQueryBuilder.queryName()); + } + @Override public void testUnknownObjectException() { // Test isn't relevant, since query is never parsed from xContent @@ -185,7 +226,7 @@ public void testScoreDocQueryWeightCount() throws IOException { } ScoreDoc[] scoreDocs = scoreDocsList.toArray(new ScoreDoc[0]); - KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs, "field", randomVector(10)); Query query = queryBuilder.doToQuery(context); final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { @@ -228,7 +269,7 @@ public void testScoreDocQuery() throws 
IOException { } ScoreDoc[] scoreDocs = scoreDocsList.toArray(new ScoreDoc[0]); - KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs, "field", randomVector(10)); final Query query = queryBuilder.doToQuery(context); final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 1df74c787eec4..97275f7305b20 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2138,8 +2138,7 @@ protected void assertSnapshotOrGenericThread() { transportService.getTaskManager(), () -> clusterService.localNode().getId(), transportService.getLocalNodeConnection(), - transportService.getRemoteClusterService(), - new NamedWriteableRegistry(List.of()) + transportService.getRemoteClusterService() ); } diff --git a/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java b/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java index 56e72c25802e3..348ff8d10d8b1 100644 --- a/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java @@ -229,7 +229,7 @@ private static class ChildResponseHandler extends TransportResponseHandler.Empty } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } diff --git a/server/src/test/java/org/elasticsearch/tasks/CancelTasksResponseTests.java b/server/src/test/java/org/elasticsearch/tasks/CancelTasksResponseTests.java deleted file mode 100644 index 793c1f60c38e6..0000000000000 --- 
a/server/src/test/java/org/elasticsearch/tasks/CancelTasksResponseTests.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.tasks; - -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.test.AbstractXContentTestCase; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.net.ConnectException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.function.Predicate; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.equalTo; - -public class CancelTasksResponseTests extends AbstractXContentTestCase { - - // CancelTasksResponse doesn't directly implement ToXContent because it has multiple XContent representations, so we must wrap here - public record CancelTasksResponseWrapper(CancelTasksResponse in) implements ToXContentObject { - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return ChunkedToXContent.wrapAsToXContent(in.groupedByNone()).toXContent(builder, params); - } - } - - @Override - protected 
CancelTasksResponseWrapper createTestInstance() { - List randomTasks = randomTasks(); - return new CancelTasksResponseWrapper(new CancelTasksResponse(randomTasks, Collections.emptyList(), Collections.emptyList())); - } - - private static List randomTasks() { - List randomTasks = new ArrayList<>(); - for (int i = 0; i < randomInt(10); i++) { - randomTasks.add(TaskInfoTests.randomTaskInfo()); - } - return randomTasks; - } - - @Override - protected Predicate getRandomFieldsExcludeFilter() { - // status and headers hold arbitrary content, we can't inject random fields in them - return field -> field.endsWith("status") || field.endsWith("headers"); - } - - @Override - protected void assertEqualInstances(CancelTasksResponseWrapper expectedInstanceWrapper, CancelTasksResponseWrapper newInstanceWrapper) { - final var expectedInstance = expectedInstanceWrapper.in(); - final var newInstance = newInstanceWrapper.in(); - assertNotSame(expectedInstance, newInstance); - assertThat(newInstance.getTasks(), equalTo(expectedInstance.getTasks())); - ListTasksResponseTests.assertOnNodeFailures(newInstance.getNodeFailures(), expectedInstance.getNodeFailures()); - ListTasksResponseTests.assertOnTaskFailures(newInstance.getTaskFailures(), expectedInstance.getTaskFailures()); - } - - @Override - protected CancelTasksResponseWrapper doParseInstance(XContentParser parser) { - return new CancelTasksResponseWrapper(CancelTasksResponse.fromXContent(parser)); - } - - @Override - protected boolean supportsUnknownFields() { - return true; - } - - /** - * Test parsing {@link ListTasksResponse} with inner failures as they don't support asserting on xcontent equivalence, given that - * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} - * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. 
- */ - public void testFromXContentWithFailures() throws IOException { - Supplier instanceSupplier = CancelTasksResponseTests::createTestInstanceWithFailures; - // with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, - // but that does not bother our assertions, as we only want to test that we don't break. - boolean supportsUnknownFields = true; - // exceptions are not of the same type whenever parsed back - boolean assertToXContentEquivalence = false; - AbstractXContentTestCase.testFromXContent( - NUMBER_OF_TEST_RUNS, - instanceSupplier, - supportsUnknownFields, - Strings.EMPTY_ARRAY, - getRandomFieldsExcludeFilter(), - this::createParser, - this::doParseInstance, - this::assertEqualInstances, - assertToXContentEquivalence, - ToXContent.EMPTY_PARAMS - ); - } - - private static CancelTasksResponseWrapper createTestInstanceWithFailures() { - int numNodeFailures = randomIntBetween(0, 3); - List nodeFailures = new ArrayList<>(numNodeFailures); - for (int i = 0; i < numNodeFailures; i++) { - nodeFailures.add(new FailedNodeException(randomAlphaOfLength(5), "error message", new ConnectException())); - } - int numTaskFailures = randomIntBetween(0, 3); - List taskFailures = new ArrayList<>(numTaskFailures); - for (int i = 0; i < numTaskFailures; i++) { - taskFailures.add(new TaskOperationFailure(randomAlphaOfLength(5), randomLong(), new IllegalStateException())); - } - return new CancelTasksResponseWrapper(new CancelTasksResponse(randomTasks(), taskFailures, nodeFailures)); - } - -} diff --git a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 0b84f14c56ecb..93dd7bc618756 100644 --- a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ 
b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -9,7 +9,7 @@ package org.elasticsearch.test.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -55,7 +55,7 @@ private static void checkSignificantTermsAggregationCorrect(ESIntegTestCase test StringTerms classes = response.getAggregations().get("class"); Assert.assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); + Map aggs = classBucket.getAggregations().asMap(); Assert.assertTrue(aggs.containsKey("sig_terms")); SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); Assert.assertThat(agg.getBuckets().size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/threadpool/FixedThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/FixedThreadPoolTests.java index 5c355c8009d54..6be78f27135a5 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/FixedThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/FixedThreadPoolTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.concurrent.CountDownLatch; @@ -33,7 +34,7 @@ public void testRejectedExecutionCounter() throws InterruptedException { .put("thread_pool." 
+ threadPoolName + ".queue_size", queueSize) .build(); try { - threadPool = new ThreadPool(nodeSettings); + threadPool = new ThreadPool(nodeSettings, MeterRegistry.NOOP); // these tasks will consume the thread pool causing further // submissions to queue diff --git a/server/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java index 8d7a486ee79f0..9a0c5c4b75d54 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.CheckedRunnable; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.hamcrest.Matcher; import java.util.HashMap; @@ -424,7 +425,7 @@ public void runScalingThreadPoolTest(final Settings settings, final BiConsumer connections = Collections.newSetFromMap(new ConcurrentHashMap<>()); + Set connections = ConcurrentCollections.newConcurrentSet(); DiscoveryNode node = DiscoveryNodeUtils.create("", new TransportAddress(InetAddress.getLoopbackAddress(), 0)); doAnswer(invocationOnMock -> { diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index 39d5d768f81ab..3d3026a6788ac 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -139,7 +139,7 @@ public void testRequestAndResponse() throws Exception { long requestId = responseHandlers.add(new TransportResponseHandler() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } diff --git 
a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java index b745756eece0e..c350e2a4cfaa8 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java @@ -86,8 +86,6 @@ public void testSearchShards() throws Exception { service.acceptIncomingRequests(); final var client = new RemoteClusterAwareClient( - Settings.EMPTY, - threadPool, service, "cluster1", threadPool.executor(TEST_THREAD_POOL_NAME), @@ -104,7 +102,7 @@ public void testSearchShards() throws Exception { ); final SearchShardsResponse searchShardsResponse = PlainActionFuture.get( future -> client.execute( - TransportSearchShardsAction.TYPE, + TransportSearchShardsAction.REMOTE_TYPE, searchShardsRequest, ActionListener.runBefore( future, @@ -142,14 +140,7 @@ public void testSearchShardsThreadContextHeader() { service.start(); service.acceptIncomingRequests(); - final var client = new RemoteClusterAwareClient( - Settings.EMPTY, - threadPool, - service, - "cluster1", - EsExecutors.DIRECT_EXECUTOR_SERVICE, - randomBoolean() - ); + final var client = new RemoteClusterAwareClient(service, "cluster1", EsExecutors.DIRECT_EXECUTOR_SERVICE, randomBoolean()); int numThreads = 10; ExecutorService executorService = Executors.newFixedThreadPool(numThreads); @@ -169,7 +160,7 @@ public void testSearchShardsThreadContextHeader() { null ); client.execute( - TransportSearchShardsAction.TYPE, + TransportSearchShardsAction.REMOTE_TYPE, searchShardsRequest, ActionListener.runBefore( future, diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index 51b81cf1862e2..44fa6ca8cbdf1 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ 
b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -9,9 +9,13 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -90,22 +94,20 @@ public void testConnectAndExecuteRequest() throws Exception { logger.info("now accepting incoming requests on local transport"); RemoteClusterService remoteClusterService = service.getRemoteClusterService(); assertTrue(remoteClusterService.isRemoteNodeConnected("test", remoteNode)); - Client client = remoteClusterService.getRemoteClusterClient( - threadPool, + var client = remoteClusterService.getRemoteClusterClient( "test", threadPool.executor(TEST_THREAD_POOL_NAME), randomBoolean() ); ClusterStateResponse clusterStateResponse = PlainActionFuture.get( - future -> client.admin() - .cluster() - .prepareState() - .execute( - ActionListener.runBefore( - future, - () -> assertTrue(Thread.currentThread().getName().contains('[' + TEST_THREAD_POOL_NAME + ']')) - ) - ), + future -> client.execute( + ClusterStateAction.REMOTE_TYPE, + new ClusterStateRequest(), + ActionListener.runBefore( + future, + () -> assertTrue(Thread.currentThread().getName().contains('[' + TEST_THREAD_POOL_NAME + ']')) + ) + ), 10, TimeUnit.SECONDS ); @@ -114,7 +116,9 @@ public void testConnectAndExecuteRequest() throws Exception { // 
also test a failure, there is no handler for scroll registered ActionNotFoundTransportException ex = expectThrows( ActionNotFoundTransportException.class, - () -> client.prepareSearchScroll("").get() + () -> PlainActionFuture.get( + future -> client.execute(TransportSearchScrollAction.REMOTE_TYPE, new SearchScrollRequest(""), future) + ) ); assertEquals("No handler for action [indices:data/read/scroll]", ex.getMessage()); } @@ -167,13 +171,10 @@ public void testEnsureWeReconnect() throws Exception { connectionManager.disconnectFromNode(remoteNode); closeFuture.get(); - Client client = remoteClusterService.getRemoteClusterClient( - threadPool, - "test", - EsExecutors.DIRECT_EXECUTOR_SERVICE, - true + var client = remoteClusterService.getRemoteClusterClient("test", EsExecutors.DIRECT_EXECUTOR_SERVICE, true); + ClusterStateResponse clusterStateResponse = PlainActionFuture.get( + f -> client.execute(ClusterStateAction.REMOTE_TYPE, new ClusterStateRequest(), f) ); - ClusterStateResponse clusterStateResponse = client.admin().cluster().prepareState().execute().get(); assertNotNull(clusterStateResponse); assertEquals("foo_bar_cluster", clusterStateResponse.getState().getClusterName().value()); assertTrue(remoteClusterConnection.isNodeConnected(remoteNode)); @@ -198,7 +199,7 @@ public void testRemoteClusterServiceNotEnabled() { final RemoteClusterService remoteClusterService = service.getRemoteClusterService(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> remoteClusterService.getRemoteClusterClient(threadPool, "test", EsExecutors.DIRECT_EXECUTOR_SERVICE, randomBoolean()) + () -> remoteClusterService.getRemoteClusterClient("test", EsExecutors.DIRECT_EXECUTOR_SERVICE, randomBoolean()) ); assertThat(e.getMessage(), equalTo("this node does not have the remote_cluster_client role")); } @@ -241,7 +242,7 @@ public void testQuicklySkipUnavailableClusters() throws Exception { service.start(); service.acceptIncomingRequests(); 
RemoteClusterService remoteClusterService = service.getRemoteClusterService(); - Client client = remoteClusterService.getRemoteClusterClient(threadPool, "test", EsExecutors.DIRECT_EXECUTOR_SERVICE); + var client = remoteClusterService.getRemoteClusterClient("test", EsExecutors.DIRECT_EXECUTOR_SERVICE); try { assertFalse(remoteClusterService.isRemoteNodeConnected("test", remoteNode)); @@ -249,7 +250,9 @@ public void testQuicklySkipUnavailableClusters() throws Exception { // check that we quickly fail expectThrows( NoSuchRemoteClusterException.class, - () -> client.admin().cluster().prepareState().get(TimeValue.timeValueSeconds(10)) + () -> PlainActionFuture.get( + f -> client.execute(ClusterStateAction.REMOTE_TYPE, new ClusterStateRequest(), f) + ) ); } finally { service.clearAllRules(); @@ -258,7 +261,9 @@ public void testQuicklySkipUnavailableClusters() throws Exception { assertBusy(() -> { try { - client.admin().cluster().prepareState().get(); + PlainActionFuture.get( + f -> client.execute(ClusterStateAction.REMOTE_TYPE, new ClusterStateRequest(), f) + ); } catch (NoSuchRemoteClusterException e) { // keep retrying on this exception, the goal is to check that we eventually reconnect throw new AssertionError(e); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 947b894124137..2ebd51b2d7f48 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.ReleasableRef; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; 
@@ -147,31 +148,36 @@ public static MockTransportService startTransport( } SearchHits searchHits; if ("null_target".equals(request.preference())) { - searchHits = new SearchHits( - new SearchHit[] { new SearchHit(0) }, + searchHits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(0) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F ); } else { searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN); } - SearchResponse searchResponse = new SearchResponse( - searchHits, - InternalAggregations.EMPTY, - null, - false, - null, - null, - 1, - null, - 1, - 1, - 0, - 100, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - channel.sendResponse(searchResponse); + try ( + var searchResponseRef = ReleasableRef.of( + new SearchResponse( + searchHits, + InternalAggregations.EMPTY, + null, + false, + null, + null, + 1, + null, + 1, + 1, + 0, + 100, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ) + ) + ) { + channel.sendResponse(searchResponseRef.get()); + } } ); newService.registerRequestHandler( diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java index f02148a40e47e..b814138f3ed22 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -20,18 +22,80 @@ public void testResolveRemoteClusterCredentials() { final String clusterAlias = randomAlphaOfLength(9); final String otherClusterAlias = 
randomAlphaOfLength(10); - final String secret = randomAlphaOfLength(20); - final Settings settings = buildSettingsWithCredentials(clusterAlias, secret); - RemoteClusterCredentialsManager credentialsManager = new RemoteClusterCredentialsManager(settings); - assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(secret)); - assertThat(credentialsManager.hasCredentials(otherClusterAlias), is(false)); + final RemoteClusterCredentialsManager credentialsManager = new RemoteClusterCredentialsManager(Settings.EMPTY); + { + final String secret = randomAlphaOfLength(20); + final Settings settings = buildSettingsWithCredentials(clusterAlias, secret); + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + settings + ); + assertThat(actual.addedClusterAliases(), containsInAnyOrder(clusterAlias)); + assertThat(actual.removedClusterAliases(), is(empty())); + assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(secret)); + assertThat(credentialsManager.hasCredentials(otherClusterAlias), is(false)); + } - final String updatedSecret = randomAlphaOfLength(21); - credentialsManager.updateClusterCredentials(buildSettingsWithCredentials(clusterAlias, updatedSecret)); - assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(updatedSecret)); + { + final String updatedSecret = randomAlphaOfLength(21); + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithCredentials(clusterAlias, updatedSecret) + ); + assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(updatedSecret)); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), is(empty())); + } - credentialsManager.updateClusterCredentials(Settings.EMPTY); - 
assertThat(credentialsManager.hasCredentials(clusterAlias), is(false)); + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + Settings.EMPTY + ); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(clusterAlias)); + assertThat(credentialsManager.hasCredentials(clusterAlias), is(false)); + } + } + + public void testUpdateRemoteClusterCredentials() { + final String clusterAlias = randomAlphaOfLength(9); + final String otherClusterAlias = randomAlphaOfLength(10); + + final RemoteClusterCredentialsManager credentialsManager = new RemoteClusterCredentialsManager(Settings.EMPTY); + + // addition + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithRandomCredentialsForAliases(clusterAlias, otherClusterAlias) + ); + assertThat(actual.addedClusterAliases(), containsInAnyOrder(clusterAlias, otherClusterAlias)); + assertThat(actual.removedClusterAliases(), is(empty())); + } + + // update and removal + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithRandomCredentialsForAliases(clusterAlias) + ); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(otherClusterAlias)); + } + + // addition and removal + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult actual = credentialsManager.updateClusterCredentials( + buildSettingsWithRandomCredentialsForAliases(otherClusterAlias) + ); + assertThat(actual.addedClusterAliases(), containsInAnyOrder(otherClusterAlias)); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(clusterAlias)); + } + + // removal + { + final RemoteClusterCredentialsManager.UpdateRemoteClusterCredentialsResult 
actual = credentialsManager.updateClusterCredentials( + Settings.EMPTY + ); + assertThat(actual.addedClusterAliases(), is(empty())); + assertThat(actual.removedClusterAliases(), containsInAnyOrder(otherClusterAlias)); + } } private Settings buildSettingsWithCredentials(String clusterAlias, String secret) { @@ -40,4 +104,13 @@ private Settings buildSettingsWithCredentials(String clusterAlias, String secret secureSettings.setString("cluster.remote." + clusterAlias + ".credentials", secret); return builder.setSecureSettings(secureSettings).build(); } + + private Settings buildSettingsWithRandomCredentialsForAliases(String... clusterAliases) { + final Settings.Builder builder = Settings.builder(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + for (var alias : clusterAliases) { + secureSettings.setString("cluster.remote." + alias + ".credentials", randomAlphaOfLength(42)); + } + return builder.setSecureSettings(secureSettings).build(); + } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 1a530a1602b18..29a5d5a34e37f 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -1426,6 +1426,203 @@ public void testUseDifferentTransportProfileForCredentialsProtectedRemoteCluster } } + public void testUpdateRemoteClusterCredentialsRebuildsConnectionWithCorrectProfile() throws IOException, InterruptedException { + final List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService c = startTransport( + "cluster_1", + knownNodes, + VersionInformation.CURRENT, + TransportVersion.current(), + Settings.builder() + .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") + .put(RemoteClusterPortSettings.PORT.getKey(), "0") + .build() + ) + ) { + final DiscoveryNode 
discoNode = c.getLocalDiscoNode().withTransportAddress(c.boundRemoteAccessAddress().publishAddress()); + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool, + null + ) + ) { + transportService.start(); + transportService.acceptIncomingRequests(); + + try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) { + service.initializeRemoteClusters(); + + final Settings clusterSettings = buildRemoteClusterSettings("cluster_1", discoNode.getAddress().toString()); + final CountDownLatch latch = new CountDownLatch(1); + service.updateRemoteCluster("cluster_1", clusterSettings, connectionListener(latch)); + latch.await(); + + assertConnectionHasProfile(service.getRemoteClusterConnection("cluster_1"), "default"); + + { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("cluster.remote.cluster_1.credentials", randomAlphaOfLength(10)); + final PlainActionFuture listener = new PlainActionFuture<>(); + final Settings settings = Settings.builder().put(clusterSettings).setSecureSettings(secureSettings).build(); + service.updateRemoteClusterCredentials(() -> settings, listener); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile( + service.getRemoteClusterConnection("cluster_1"), + RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE + ); + + { + final PlainActionFuture listener = new PlainActionFuture<>(); + service.updateRemoteClusterCredentials( + // Settings without credentials constitute credentials removal + () -> clusterSettings, + listener + ); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile(service.getRemoteClusterConnection("cluster_1"), "default"); + } + } + } + } + + public void testUpdateRemoteClusterCredentialsRebuildsMultipleConnectionsDespiteFailures() throws IOException, InterruptedException { + final 
List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService c1 = startTransport( + "cluster_1", + knownNodes, + VersionInformation.CURRENT, + TransportVersion.current(), + Settings.builder() + .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") + .put(RemoteClusterPortSettings.PORT.getKey(), "0") + .build() + ); + MockTransportService c2 = startTransport( + "cluster_2", + knownNodes, + VersionInformation.CURRENT, + TransportVersion.current(), + Settings.builder() + .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") + .put(RemoteClusterPortSettings.PORT.getKey(), "0") + .build() + ) + ) { + final DiscoveryNode c1DiscoNode = c1.getLocalDiscoNode().withTransportAddress(c1.boundRemoteAccessAddress().publishAddress()); + final DiscoveryNode c2DiscoNode = c2.getLocalDiscoNode().withTransportAddress(c2.boundRemoteAccessAddress().publishAddress()); + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool, + null + ) + ) { + // fail on connection attempt + transportService.addConnectBehavior(c2DiscoNode.getAddress(), (transport, discoveryNode, profile, listener) -> { + throw new RuntimeException("bad cluster"); + }); + + transportService.start(); + transportService.acceptIncomingRequests(); + + final String goodCluster = randomAlphaOfLength(10); + final String badCluster = randomValueOtherThan(goodCluster, () -> randomAlphaOfLength(10)); + final String missingCluster = randomValueOtherThanMany( + alias -> alias.equals(goodCluster) || alias.equals(badCluster), + () -> randomAlphaOfLength(10) + ); + try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) { + service.initializeRemoteClusters(); + + final Settings cluster1Settings = buildRemoteClusterSettings(goodCluster, c1DiscoNode.getAddress().toString()); + final var latch = new 
CountDownLatch(1); + service.updateRemoteCluster(goodCluster, cluster1Settings, connectionListener(latch)); + latch.await(); + + final Settings cluster2Settings = buildRemoteClusterSettings(badCluster, c2DiscoNode.getAddress().toString()); + final PlainActionFuture future = new PlainActionFuture<>(); + service.updateRemoteCluster(badCluster, cluster2Settings, future); + final var ex = expectThrows(Exception.class, () -> future.actionGet(10, TimeUnit.SECONDS)); + assertThat(ex.getMessage(), containsString("bad cluster")); + + assertConnectionHasProfile(service.getRemoteClusterConnection(goodCluster), "default"); + assertConnectionHasProfile(service.getRemoteClusterConnection(badCluster), "default"); + expectThrows(NoSuchRemoteClusterException.class, () -> service.getRemoteClusterConnection(missingCluster)); + + { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("cluster.remote." + badCluster + ".credentials", randomAlphaOfLength(10)); + secureSettings.setString("cluster.remote." + goodCluster + ".credentials", randomAlphaOfLength(10)); + secureSettings.setString("cluster.remote." 
+ missingCluster + ".credentials", randomAlphaOfLength(10)); + final PlainActionFuture listener = new PlainActionFuture<>(); + final Settings settings = Settings.builder() + .put(cluster1Settings) + .put(cluster2Settings) + .setSecureSettings(secureSettings) + .build(); + service.updateRemoteClusterCredentials(() -> settings, listener); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile( + service.getRemoteClusterConnection(goodCluster), + RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE + ); + assertConnectionHasProfile( + service.getRemoteClusterConnection(badCluster), + RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE + ); + expectThrows(NoSuchRemoteClusterException.class, () -> service.getRemoteClusterConnection(missingCluster)); + + { + final PlainActionFuture listener = new PlainActionFuture<>(); + final Settings settings = Settings.builder().put(cluster1Settings).put(cluster2Settings).build(); + service.updateRemoteClusterCredentials( + // Settings without credentials constitute credentials removal + () -> settings, + listener + ); + listener.actionGet(10, TimeUnit.SECONDS); + } + + assertConnectionHasProfile(service.getRemoteClusterConnection(goodCluster), "default"); + assertConnectionHasProfile(service.getRemoteClusterConnection(badCluster), "default"); + expectThrows(NoSuchRemoteClusterException.class, () -> service.getRemoteClusterConnection(missingCluster)); + } + } + } + } + + private static void assertConnectionHasProfile(RemoteClusterConnection remoteClusterConnection, String expectedConnectionProfile) { + assertThat( + remoteClusterConnection.getConnectionManager().getConnectionProfile().getTransportProfile(), + equalTo(expectedConnectionProfile) + ); + } + + private Settings buildRemoteClusterSettings(String clusterAlias, String address) { + final Settings.Builder settings = Settings.builder(); + final boolean proxyMode = randomBoolean(); + if (proxyMode) { + settings.put("cluster.remote." 
+ clusterAlias + ".mode", "proxy") + .put("cluster.remote." + clusterAlias + ".proxy_address", address); + } else { + settings.put("cluster.remote." + clusterAlias + ".seeds", address); + } + return settings.build(); + } + public void testLogsConnectionResult() throws IOException { try ( diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index 221e57d913dc7..93986aefe9f25 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -118,6 +118,7 @@ public void testSendMessage() throws InterruptedException { assertEquals(request.sourceNode, "TS_A"); final SimpleTestResponse response = new SimpleTestResponse("TS_A"); channel.sendResponse(response); + response.decRef(); assertThat(response.hasReferences(), equalTo(false)); } ); @@ -134,6 +135,7 @@ public void testSendMessage() throws InterruptedException { assertEquals(request.sourceNode, "TS_A"); final SimpleTestResponse response = new SimpleTestResponse("TS_B"); channel.sendResponse(response); + response.decRef(); assertThat(response.hasReferences(), equalTo(false)); } ); @@ -148,6 +150,7 @@ public void testSendMessage() throws InterruptedException { assertEquals(request.sourceNode, "TS_A"); final SimpleTestResponse response = new SimpleTestResponse("TS_C"); channel.sendResponse(response); + response.decRef(); assertThat(response.hasReferences(), equalTo(false)); } ); @@ -176,7 +179,7 @@ public SimpleTestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -228,7 +231,7 @@ public SimpleTestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() 
{ return TransportResponseHandler.TRANSPORT_WORKER; } @@ -294,7 +297,7 @@ public SimpleTestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -319,8 +322,10 @@ public void handleException(TransportException exp) { ); latch.await(); - assertThat(response.get(), notNullValue()); - assertBusy(() -> assertThat(response.get().hasReferences(), equalTo(false))); + final var responseInstance = response.get(); + assertThat(responseInstance, notNullValue()); + responseInstance.decRef(); + assertBusy(() -> assertThat(responseInstance.hasReferences(), equalTo(false))); } public void testException() throws InterruptedException { @@ -372,7 +377,7 @@ public SimpleTestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -439,10 +444,10 @@ public boolean shouldCancelChildrenOnCancellation() { public static class SimpleTestResponse extends TransportResponse { final String targetNode; - final RefCounted refCounted = new AbstractRefCounted() { + final RefCounted refCounted = LeakTracker.wrap(new AbstractRefCounted() { @Override protected void closeInternal() {} - }; + }); SimpleTestResponse(String targetNode) { this.targetNode = targetNode; diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceDeserializationFailureTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceDeserializationFailureTests.java index 2eb77c706a3a2..a3b44c702e692 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceDeserializationFailureTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceDeserializationFailureTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; 
import org.elasticsearch.test.transport.MockTransport; -import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; import java.util.List; @@ -91,7 +90,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req TransportRequestOptions.EMPTY, new TransportResponseHandler() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -157,7 +156,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, TransportRequestOptions.EMPTY, new TransportResponseHandler() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java index 2c10f47955c4c..87fbf113fc1c9 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java @@ -10,26 +10,42 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; +import 
org.elasticsearch.node.NodeClosedException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.FixedExecutorBuilder; +import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import java.io.IOException; import java.util.Random; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.startsWith; public class TransportServiceLifecycleTests extends ESTestCase { @@ -82,8 +98,8 @@ public TransportResponse.Empty read(StreamInput in) { } @Override - public Executor executor(ThreadPool threadPool) { - return threadPool.executor(executor); + public Executor executor() { + return nodeB.transportService.getThreadPool().executor(executor); } } ); @@ -107,14 +123,127 @@ public Executor executor(ThreadPool threadPool) { } } + public void testInternalSendExceptionForksToHandlerExecutor() { + final var deterministicTaskQueue = new DeterministicTaskQueue(); + + try (var nodeA = new TestNode("node-A")) { + final var future = new PlainActionFuture(); + nodeA.transportService.sendRequest( + nodeA.getThrowingConnection(), + TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + new TransportRequest.Empty(), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(future, unusedReader(), deterministicTaskQueue::scheduleNow) + ); + + assertFalse(future.isDone()); + 
assertTrue(deterministicTaskQueue.hasRunnableTasks()); + deterministicTaskQueue.runAllRunnableTasks(); + assertTrue(future.isDone()); + assertEquals("simulated exception in sendRequest", getSendRequestException(future, IOException.class).getMessage()); + } + } + + public void testInternalSendExceptionForksToGenericIfHandlerDoesNotFork() { + try (var nodeA = new TestNode("node-A")) { + final var future = new PlainActionFuture(); + nodeA.transportService.sendRequest( + nodeA.getThrowingConnection(), + TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + new TransportRequest.Empty(), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { + assertThat(Thread.currentThread().getName(), containsString("[" + ThreadPool.Names.GENERIC + "]")); + l.onFailure(e); + }), unusedReader(), EsExecutors.DIRECT_EXECUTOR_SERVICE) + ); + + assertEquals("simulated exception in sendRequest", getSendRequestException(future, IOException.class).getMessage()); + } + } + + public void testInternalSendExceptionForcesExecutionOnHandlerExecutor() { + try (var nodeA = new TestNode("node-A")) { + final var blockingLatch = new CountDownLatch(1); + final var executor = nodeA.threadPool.executor(Executors.FIXED_BOUNDED_QUEUE); + while (true) { + try { + executor.execute(() -> safeAwait(blockingLatch)); + } catch (EsRejectedExecutionException e) { + break; + } + } + + final var future = new PlainActionFuture(); + try { + nodeA.transportService.sendRequest( + nodeA.getThrowingConnection(), + TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + new TransportRequest.Empty(), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { + assertThat(Thread.currentThread().getName(), containsString("[" + Executors.FIXED_BOUNDED_QUEUE + "]")); + l.onFailure(e); + }), unusedReader(), executor) + ); + + assertFalse(future.isDone()); + } finally { + blockingLatch.countDown(); + } + 
assertEquals("simulated exception in sendRequest", getSendRequestException(future, IOException.class).getMessage()); + } + } + + public void testInternalSendExceptionCompletesHandlerOnCallingThreadIfTransportServiceClosed() { + final var nodeA = new TestNode("node-A"); + final var executor = nodeA.threadPool.executor(randomFrom(TestNode.EXECUTOR_NAMES)); + nodeA.close(); + + final var testThread = Thread.currentThread(); + final var future = new PlainActionFuture(); + nodeA.transportService.sendRequest( + nodeA.getThrowingConnection(), + TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + new TransportRequest.Empty(), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { + assertSame(testThread, Thread.currentThread()); + l.onFailure(e); + }), unusedReader(), executor) + ); + + assertTrue(future.isDone()); + assertThat(getSendRequestException(future, NodeClosedException.class).getMessage(), startsWith("node closed")); + } + + private static Writeable.Reader unusedReader() { + return in -> fail(null, "should not be used"); + } + + private static E getSendRequestException(Future future, Class exceptionClass) { + return asInstanceOf( + exceptionClass, + expectThrows(ExecutionException.class, SendRequestTransportException.class, () -> future.get(10, TimeUnit.SECONDS)).getCause() + ); + } + + private static class Executors { + static final String SCALING_DROP_ON_SHUTDOWN = "scaling-drop-on-shutdown"; + static final String SCALING_REJECT_ON_SHUTDOWN = "scaling-reject-on-shutdown"; + static final String FIXED_BOUNDED_QUEUE = "fixed-bounded-queue"; + static final String FIXED_UNBOUNDED_QUEUE = "fixed-unbounded-queue"; + } + private static class TestNode implements Releasable { static final String ACTION_NAME_PREFIX = "internal:test/"; static final String[] EXECUTOR_NAMES = new String[] { ThreadPool.Names.SAME, - ThreadPool.Names.GENERIC, - ThreadPool.Names.CLUSTER_COORDINATION, - ThreadPool.Names.SEARCH 
}; + Executors.SCALING_DROP_ON_SHUTDOWN, + Executors.SCALING_REJECT_ON_SHUTDOWN, + Executors.FIXED_BOUNDED_QUEUE, + Executors.FIXED_UNBOUNDED_QUEUE }; final ThreadPool threadPool; final TransportService transportService; @@ -122,8 +251,24 @@ private static class TestNode implements Releasable { TestNode(String nodeName) { threadPool = new TestThreadPool( nodeName, - // tiny search thread pool & queue to trigger non-shutdown-related rejections - Settings.builder().put("thread_pool.search.size", 2).put("thread_pool.search.queue_size", 5).build() + new ScalingExecutorBuilder(Executors.SCALING_DROP_ON_SHUTDOWN, 3, 3, TimeValue.timeValueSeconds(60), false), + new ScalingExecutorBuilder(Executors.SCALING_REJECT_ON_SHUTDOWN, 3, 3, TimeValue.timeValueSeconds(60), true), + new FixedExecutorBuilder( + Settings.EMPTY, + Executors.FIXED_BOUNDED_QUEUE, + 2, + 5, + Executors.FIXED_BOUNDED_QUEUE, + randomFrom(EsExecutors.TaskTrackingConfig.DO_NOT_TRACK, EsExecutors.TaskTrackingConfig.DEFAULT) + ), + new FixedExecutorBuilder( + Settings.EMPTY, + Executors.FIXED_UNBOUNDED_QUEUE, + 2, + -1, + Executors.FIXED_UNBOUNDED_QUEUE, + randomFrom(EsExecutors.TaskTrackingConfig.DO_NOT_TRACK, EsExecutors.TaskTrackingConfig.DEFAULT) + ) ) { @Override public ExecutorService executor(String name) { @@ -172,6 +317,26 @@ public void close() { transportService.close(); ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); } + + Transport.Connection getThrowingConnection() { + return new CloseableConnection() { + @Override + public DiscoveryNode getNode() { + return transportService.getLocalNode(); + } + + @Override + public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) + throws IOException, TransportException { + throw new IOException("simulated exception in sendRequest"); + } + + @Override + public TransportVersion getTransportVersion() { + return TransportVersion.current(); + } + }; + } } } diff --git 
a/server/src/test/java/org/elasticsearch/upgrades/FeatureMigrationResultsTests.java b/server/src/test/java/org/elasticsearch/upgrades/FeatureMigrationResultsTests.java index 6dbae94492aaf..b7fff65a19b64 100644 --- a/server/src/test/java/org/elasticsearch/upgrades/FeatureMigrationResultsTests.java +++ b/server/src/test/java/org/elasticsearch/upgrades/FeatureMigrationResultsTests.java @@ -8,17 +8,64 @@ package org.elasticsearch.upgrades; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ChunkedToXContentDiffableSerializationTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; public class FeatureMigrationResultsTests extends ChunkedToXContentDiffableSerializationTestCase { + private static final ConstructingObjectParser SINGLE_FEATURE_RESULT_PARSER = + new ConstructingObjectParser<>( + "feature_migration_status", + a -> new SingleFeatureMigrationResult((boolean) a[0], (String) a[1], (Exception) a[2]) + ); + + static { + SINGLE_FEATURE_RESULT_PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), SingleFeatureMigrationResult.SUCCESS_FIELD); + SINGLE_FEATURE_RESULT_PARSER.declareString( + ConstructingObjectParser.optionalConstructorArg(), + SingleFeatureMigrationResult.FAILED_INDEX_NAME_FIELD + ); + SINGLE_FEATURE_RESULT_PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + SingleFeatureMigrationResult.EXCEPTION_FIELD + ); + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + FeatureMigrationResults.TYPE, + a -> { + final Map 
statuses = ((List>) a[0]).stream() + .collect(Collectors.toMap(Tuple::v1, Tuple::v2)); + return new FeatureMigrationResults(statuses); + } + ); + + static { + PARSER.declareNamedObjects( + ConstructingObjectParser.constructorArg(), + (p, c, n) -> new Tuple<>(n, SINGLE_FEATURE_RESULT_PARSER.apply(p, c)), + v -> { + throw new IllegalArgumentException( + "ordered " + FeatureMigrationResults.RESULTS_FIELD.getPreferredName() + " are not supported" + ); + }, + FeatureMigrationResults.RESULTS_FIELD + ); + } + @Override protected FeatureMigrationResults createTestInstance() { return new FeatureMigrationResults(randomMap(0, 10, () -> new Tuple<>(randomAlphaOfLength(5), randomFeatureStatus()))); @@ -60,7 +107,7 @@ protected Writeable.Reader instanceReader() { @Override protected FeatureMigrationResults doParseInstance(XContentParser parser) throws IOException { - return FeatureMigrationResults.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/resources/org/elasticsearch/ReleaseVersionsTests.csv b/server/src/test/resources/org/elasticsearch/ReleaseVersionsTests.csv new file mode 100644 index 0000000000000..33d58941baa44 --- /dev/null +++ b/server/src/test/resources/org/elasticsearch/ReleaseVersionsTests.csv @@ -0,0 +1,5 @@ +8.0.0,10 +8.1.0,14 +8.1.1,14 +8.2.0,21 +8.2.1,22 diff --git a/test/external-modules/apm-integration/build.gradle b/test/external-modules/apm-integration/build.gradle index b3df1d622af54..3ae62c72968b3 100644 --- a/test/external-modules/apm-integration/build.gradle +++ b/test/external-modules/apm-integration/build.gradle @@ -16,15 +16,10 @@ tasks.named("test").configure { enabled = false } -tasks.named("yamlRestTest").configure { - enabled = false -} - tasks.named('javaRestTest').configure { it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } } - dependencies { clusterModules project(':modules:apm') } diff --git 
a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java index ddcd667b9cbe7..70ce86a1d91a6 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java +++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java @@ -16,7 +16,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.spi.XContentProvider; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; import org.hamcrest.StringDescription; import org.junit.ClassRule; import org.junit.Rule; @@ -107,8 +106,10 @@ public void testApmIntegration() throws Exception { client().performRequest(new Request("GET", "/_use_apm_metrics")); - assertTrue("Timeout when waiting for assertions to complete.", finished.await(30, TimeUnit.SECONDS)); - assertThat(sampleAssertions, Matchers.equalTo(Collections.emptyMap())); + assertTrue( + "Timeout when waiting for assertions to complete. 
Remaining assertions to match: " + sampleAssertions, + finished.await(30, TimeUnit.SECONDS) + ); } private Map.Entry>> assertion( diff --git a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java index 6afd5120c17fe..7ecdf253364f4 100644 --- a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java +++ b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/ApmIntegrationPlugin.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -30,6 +31,7 @@ public class ApmIntegrationPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( final Settings settings, + NamedWriteableRegistry namedWriteableRegistry, final RestController restController, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, diff --git a/test/external-modules/build.gradle b/test/external-modules/build.gradle index b0deae90b40d9..a663006a57236 100644 --- a/test/external-modules/build.gradle +++ b/test/external-modules/build.gradle @@ -3,15 +3,10 @@ import org.elasticsearch.gradle.internal.info.BuildParams subprojects { apply plugin: 'elasticsearch.base-internal-es-plugin' - apply plugin: 'elasticsearch.legacy-yaml-rest-test' esplugin { name it.name licenseFile rootProject.file('licenses/SSPL-1.0+ELASTIC-LICENSE-2.0.txt') noticeFile rootProject.file('NOTICE.txt') } - - tasks.named('yamlRestTest').configure { - it.onlyIf("snapshot build") { 
BuildParams.isSnapshotBuild() } - } } diff --git a/test/external-modules/delayed-aggs/build.gradle b/test/external-modules/delayed-aggs/build.gradle index 88a1fe5568c66..11461beeaad7d 100644 --- a/test/external-modules/delayed-aggs/build.gradle +++ b/test/external-modules/delayed-aggs/build.gradle @@ -5,6 +5,13 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ +import org.elasticsearch.gradle.internal.info.BuildParams + +apply plugin: 'elasticsearch.legacy-yaml-rest-test' + +tasks.named('yamlRestTest').configure { + it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } +} esplugin { description 'A test module that allows to delay aggregations on shards with a configurable time' diff --git a/test/external-modules/die-with-dignity/build.gradle b/test/external-modules/die-with-dignity/build.gradle index 999b81af027b3..34a9a71533d3c 100644 --- a/test/external-modules/die-with-dignity/build.gradle +++ b/test/external-modules/die-with-dignity/build.gradle @@ -16,10 +16,6 @@ tasks.named("test").configure { enabled = false } -tasks.named("yamlRestTest").configure { - enabled = false -} - tasks.named('javaRestTest').configure { it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } } diff --git a/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java index b313b87fc2153..a176a962adc80 100644 --- a/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java +++ b/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -77,7 +77,7 @@ public void testDieWithDignity() throws Exception { } private Process startJcmd(long pid) throws IOException { - final String jcmdPath = 
PathUtils.get(System.getProperty("tests.runtime.java"), "bin/jcmd").toString(); + final String jcmdPath = PathUtils.get(System.getProperty("java.home"), "bin/jcmd").toString(); return new ProcessBuilder().command(jcmdPath, Long.toString(pid), "VM.command_line").redirectErrorStream(true).start(); } diff --git a/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java b/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java index 96e48b1c3fd58..c974551fbbc15 100644 --- a/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java +++ b/test/external-modules/die-with-dignity/src/main/java/org/elasticsearch/test/diewithdignity/DieWithDignityPlugin.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -28,6 +29,7 @@ public class DieWithDignityPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( final Settings settings, + NamedWriteableRegistry namedWriteableRegistry, final RestController restController, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle index c9b8ab9a4dfd5..328c0e3e20f50 100644 --- a/test/external-modules/error-query/build.gradle +++ b/test/external-modules/error-query/build.gradle @@ -6,6 +6,13 @@ * Side Public License, v 1. 
*/ +import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-yaml-rest-test' + +tasks.named('yamlRestTest').configure { + it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } +} + esplugin { description 'A test module that exposes a way to simulate search shard failures and warnings' classname 'org.elasticsearch.test.errorquery.ErrorQueryPlugin' diff --git a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java index 92b05ec9bf649..0b1ed05039a6d 100644 --- a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java +++ b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java @@ -53,7 +53,7 @@ public IndexError(StreamInput in) throws IOException { this.shardIds = in.readBoolean() ? in.readIntArray() : null; this.errorType = in.readEnum(ERROR_TYPE.class); this.message = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.stallTimeSeconds = in.readVInt(); } else { this.stallTimeSeconds = 0; @@ -69,7 +69,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeEnum(errorType); out.writeString(message); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeVInt(stallTimeSeconds); } } diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle new file mode 100644 index 0000000000000..3a95f3f0b59c8 --- /dev/null +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +import org.elasticsearch.gradle.internal.info.BuildParams + +apply plugin: 'elasticsearch.internal-java-rest-test' +// Necessary to use tests in Serverless +apply plugin: 'elasticsearch.internal-test-artifact' + +group = 'org.elasticsearch.plugin' + +esplugin { + description 'A test module that can trigger out of memory' + classname 'org.elasticsearch.test.esql.heap_attack.HeapAttackPlugin' +} + +tasks.named('javaRestTest') { + usesDefaultDistribution() + it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } +} diff --git a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java similarity index 85% rename from x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java rename to test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 0f5dd72feafbb..8d4b5ece98993 100644 --- a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -5,27 +5,34 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.qa.heap_attack; +package org.elasticsearch.xpack.esql.heap_attack; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -39,6 +46,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.common.Strings.hasText; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; @@ -51,21 +59,28 @@ * Tests that run ESQL queries that have, in the past, used so much 
memory they * crash Elasticsearch. */ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103527") public class HeapAttackIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) + .module("test-esql-heap-attack") .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .build(); + static volatile boolean SUITE_ABORTED = false; + @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); } + @Before + public void skipOnAborted() { + assumeFalse("skip on aborted", SUITE_ABORTED); + } + /** * This used to fail, but we've since compacted top n so it actually succeeds now. */ @@ -265,7 +280,6 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100528") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(1000)); @@ -296,10 +310,48 @@ private Response query(String query, String filterPath) throws IOException { request.setJsonEntity(query.toString().replace("\n", "\\n")); request.setOptions( RequestOptions.DEFAULT.toBuilder() - .setRequestConfig(RequestConfig.custom().setSocketTimeout(Math.toIntExact(TimeValue.timeValueMinutes(5).millis())).build()) + .setRequestConfig(RequestConfig.custom().setSocketTimeout(Math.toIntExact(TimeValue.timeValueMinutes(6).millis())).build()) .setWarningsHandler(WarningsHandler.PERMISSIVE) ); - return client().performRequest(request); + logger.info("--> test {} started querying", getTestName()); + final ThreadPool testThreadPool = new TestThreadPool(getTestName()); + final long startedTimeInNanos = System.nanoTime(); + Scheduler.Cancellable schedule = null; + try { + schedule = testThreadPool.schedule(new AbstractRunnable() { + @Override + 
public void onFailure(Exception e) { + throw new AssertionError(e); + } + + @Override + protected void doRun() throws Exception { + SUITE_ABORTED = true; + TimeValue elapsed = TimeValue.timeValueNanos(System.nanoTime() - startedTimeInNanos); + logger.info("--> test {} triggering OOM after {}", getTestName(), elapsed); + Request triggerOOM = new Request("POST", "/_trigger_out_of_memory"); + RequestConfig requestConfig = RequestConfig.custom() + .setSocketTimeout(Math.toIntExact(TimeValue.timeValueMinutes(2).millis())) + .build(); + request.setOptions(RequestOptions.DEFAULT.toBuilder().setRequestConfig(requestConfig)); + client().performRequest(triggerOOM); + } + }, TimeValue.timeValueMinutes(5), testThreadPool.executor(ThreadPool.Names.GENERIC)); + Response resp = client().performRequest(request); + logger.info("--> test {} completed querying", getTestName()); + return resp; + } finally { + if (schedule != null) { + schedule.cancel(); + } + terminate(testThreadPool); + } + } + + @Override + protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { + settings = Settings.builder().put(settings).put(ESRestTestCase.CLIENT_SOCKET_TIMEOUT, "6m").build(); + return super.buildClient(settings, hosts); } public void testFetchManyBigFields() throws IOException { @@ -387,6 +439,8 @@ private void initManyLongs() throws IOException { } } } + bulk("manylongs", bulk.toString()); + bulk.setLength(0); } initIndex("manylongs", bulk.toString()); } @@ -484,7 +538,9 @@ private void bulk(String name, String bulk) throws IOException { } private void initIndex(String name, String bulk) throws IOException { - bulk(name, bulk); + if (hasText(bulk)) { + bulk(name, bulk); + } Request request = new Request("POST", "/" + name + "/_refresh"); Response response = client().performRequest(request); @@ -510,6 +566,19 @@ private static void assertWriteResponse(Response response) throws IOException { @Before @After public void assertRequestBreakerEmpty() throws Exception 
{ - EsqlSpecTestCase.assertRequestBreakerEmpty(); + if (SUITE_ABORTED) { + return; + } + assertBusy(() -> { + HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity(); + Map stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false); + Map nodes = (Map) stats.get("nodes"); + for (Object n : nodes.values()) { + Map node = (Map) n; + Map breakers = (Map) node.get("breakers"); + Map request = (Map) breakers.get("request"); + assertMap(request, matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b")); + } + }); } } diff --git a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java new file mode 100644 index 0000000000000..77e0c3b3e0821 --- /dev/null +++ b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/HeapAttackPlugin.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.test.esql.heap_attack; + +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; + +import java.util.List; +import java.util.function.Supplier; + +public class HeapAttackPlugin extends Plugin implements ActionPlugin { + @Override + public List getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + return List.of(new RestTriggerOutOfMemoryAction()); + } +} diff --git a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java new file mode 100644 index 0000000000000..d0a146edde765 --- /dev/null +++ b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.test.esql.heap_attack; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestTriggerOutOfMemoryAction extends BaseRestHandler { + private static final Logger LOGGER = LogManager.getLogger(RestTriggerOutOfMemoryAction.class); + + @Override + public String getName() { + return "trigger_out_of_memory"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "/_trigger_out_of_memory")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + LOGGER.error("triggering out of memory"); + List values = new ArrayList<>(); + return channel -> { + while (true) { + values.add(new int[1024 * 1024]); + } + }; + } +} diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java index aa9ff52b00824..bb88aad387a0d 100644 --- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java +++ 
b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -38,7 +39,7 @@ public class SeekTrackerPlugin extends Plugin implements ActionPlugin { Setting.Property.NodeScope ); - public static final ActionType SEEK_STATS_ACTION = ActionType.localOnly("cluster:monitor/seek_stats"); + public static final ActionType SEEK_STATS_ACTION = new ActionType<>("cluster:monitor/seek_stats"); private final SeekStatsService seekStatsService = new SeekStatsService(); private final boolean enabled; @@ -70,6 +71,7 @@ public void onIndexModule(IndexModule indexModule) { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java index ce4d6fda861cd..be8d597c26010 100644 --- a/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java +++ b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java @@ -16,7 +16,6 @@ import org.testcontainers.DockerClientFactory; import org.testcontainers.containers.GenericContainer; import 
org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testcontainers.images.builder.ImageFromDockerfile; import java.io.File; import java.io.IOException; @@ -27,6 +26,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.Future; import java.util.stream.Collectors; public abstract class DockerEnvironmentAwareTestContainer extends GenericContainer @@ -56,8 +56,8 @@ private static boolean isDockerAvailable() { } } - public DockerEnvironmentAwareTestContainer(ImageFromDockerfile imageFromDockerfile) { - super(imageFromDockerfile); + public DockerEnvironmentAwareTestContainer(Future image) { + super(image); } @Override diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 2cdd6994bd2e6..22a95880193e2 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -26,9 +26,9 @@ dependencies { api "commons-codec:commons-codec:${versions.commonscodec}" // mockito - api 'org.mockito:mockito-core:5.4.0' - api 'org.mockito:mockito-subclass:5.4.0' - api 'net.bytebuddy:byte-buddy:1.14.5' + api 'org.mockito:mockito-core:5.9.0' + api 'org.mockito:mockito-subclass:5.9.0' + api 'net.bytebuddy:byte-buddy:1.14.11' api 'org.objenesis:objenesis:3.3' api "org.elasticsearch:mocksocket:${versions.mocksocket}" diff --git a/test/framework/src/integTest/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java b/test/framework/src/integTest/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java index 88958063dbbf3..a9196a3bb1377 100644 --- a/test/framework/src/integTest/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java +++ b/test/framework/src/integTest/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java @@ -19,7 +19,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.threadpool.ThreadPool; 
import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; @@ -170,7 +169,7 @@ private static void sendRequest(TransportService source, TransportService target private AtomicBoolean responded = new AtomicBoolean(); @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index 756757a162568..0f60ba9731966 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -43,6 +43,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_CREATION_DATE; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.test.ESTestCase.indexSettings; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomFrom; @@ -174,7 +175,10 @@ public static ClusterState state( unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null); } indexShardRoutingBuilder.addShard( - TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, true, primaryState, unassignedInfo, primaryRole) + shardRoutingBuilder(index, 0, primaryNode, true, primaryState).withRelocatingNodeId(relocatingNode) + .withUnassignedInfo(unassignedInfo) + .withRole(primaryRole) + .build() ); for (var replicaState : replicaStates) { @@ -191,16 +195,10 @@ public static ClusterState state( unassignedInfo = new 
UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null); } indexShardRoutingBuilder.addShard( - TestShardRouting.newShardRouting( - index, - shardId.id(), - replicaNode, - relocatingNode, - false, - replicaState.v1(), - unassignedInfo, - replicaState.v2() - ) + shardRoutingBuilder(index, shardId.id(), replicaNode, false, replicaState.v1()).withRelocatingNodeId(relocatingNode) + .withUnassignedInfo(unassignedInfo) + .withRole(replicaState.v2()) + .build() ); } final IndexShardRoutingTable indexShardRoutingTable = indexShardRoutingBuilder.build(); @@ -399,15 +397,9 @@ public static ClusterState stateWithAssignedPrimariesAndReplicas( ); for (int replica = 0; replica < replicaRoles.size(); replica++) { indexShardRoutingBuilder.addShard( - TestShardRouting.newShardRouting( - index, - i, - newNode(replica + 1).getId(), - null, - false, - ShardRoutingState.STARTED, + shardRoutingBuilder(index, i, newNode(replica + 1).getId(), false, ShardRoutingState.STARTED).withRole( replicaRoles.get(replica) - ) + ).build() ); } indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder); diff --git a/test/framework/src/main/java/org/elasticsearch/client/internal/RedirectToLocalClusterRemoteClusterClient.java b/test/framework/src/main/java/org/elasticsearch/client/internal/RedirectToLocalClusterRemoteClusterClient.java new file mode 100644 index 0000000000000..3f2569844673c --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/client/internal/RedirectToLocalClusterRemoteClusterClient.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.client.internal; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.transport.TransportResponse; + +/** + * A fake {@link RemoteClusterClient} which just runs actions on the local cluster, like a {@link NodeClient}, for use in tests. + */ +public class RedirectToLocalClusterRemoteClusterClient implements RemoteClusterClient { + + private final ElasticsearchClient delegate; + + public RedirectToLocalClusterRemoteClusterClient(ElasticsearchClient delegate) { + this.delegate = delegate; + } + + @SuppressWarnings("unchecked") + @Override + public void execute( + RemoteClusterActionType action, + Request request, + ActionListener listener + ) { + delegate.execute(new ActionType(action.name()), request, listener.map(r -> (Response) r)); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 611f2ab9f5749..1d76c1e40910e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -1181,8 +1181,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { transportService.getTaskManager(), localNode::getId, transportService.getLocalNodeConnection(), - null, - getNamedWriteableRegistry() + null ); stableMasterHealthIndicatorService = new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService); masterService.setClusterStatePublisher(coordinator); diff --git 
a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 5c5123e03454f..d0b30bff92f3e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -68,6 +68,7 @@ import static org.elasticsearch.cluster.metadata.DataStream.BACKING_INDEX_PREFIX; import static org.elasticsearch.cluster.metadata.DataStream.DATE_FORMATTER; import static org.elasticsearch.cluster.metadata.DataStream.getDefaultBackingIndexName; +import static org.elasticsearch.cluster.metadata.DataStream.getDefaultFailureStoreName; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_UUID; import static org.elasticsearch.test.ESTestCase.generateRandomStringArray; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; @@ -111,7 +112,19 @@ public static DataStream newInstance( boolean replicated, @Nullable DataStreamLifecycle lifecycle ) { - return new DataStream(name, indices, generation, metadata, false, replicated, false, false, null, lifecycle, false, List.of()); + return newInstance(name, indices, generation, metadata, replicated, lifecycle, List.of()); + } + + public static DataStream newInstance( + String name, + List indices, + long generation, + Map metadata, + boolean replicated, + @Nullable DataStreamLifecycle lifecycle, + List failureStores + ) { + return new DataStream(name, indices, generation, metadata, false, replicated, false, false, null, lifecycle, false, failureStores); } public static String getLegacyDefaultBackingIndexName( @@ -318,9 +331,21 @@ public static ClusterState getClusterStateWithDataStreams( Settings settings, int replicas, boolean replicated + ) { + return getClusterStateWithDataStreams(dataStreams, indexNames, currentTime, settings, replicas, replicated, false); + } 
+ + public static ClusterState getClusterStateWithDataStreams( + List> dataStreams, + List indexNames, + long currentTime, + Settings settings, + int replicas, + boolean replicated, + boolean storeFailures ) { Metadata.Builder builder = Metadata.builder(); - getClusterStateWithDataStreams(builder, dataStreams, indexNames, currentTime, settings, replicas, replicated); + getClusterStateWithDataStreams(builder, dataStreams, indexNames, currentTime, settings, replicas, replicated, storeFailures); return ClusterState.builder(new ClusterName("_name")).metadata(builder).build(); } @@ -331,13 +356,16 @@ public static void getClusterStateWithDataStreams( long currentTime, Settings settings, int replicas, - boolean replicated + boolean replicated, + boolean storeFailures ) { builder.put( "template_1", ComposableIndexTemplate.builder() .indexPatterns(List.of("*")) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .dataStreamTemplate( + new ComposableIndexTemplate.DataStreamTemplate(false, false, DataStream.isFailureStoreEnabled() && storeFailures) + ) .build() ); @@ -351,12 +379,29 @@ public static void getClusterStateWithDataStreams( } allIndices.addAll(backingIndices); + List failureStores = new ArrayList<>(); + if (DataStream.isFailureStoreEnabled() && storeFailures) { + for (int failureStoreNumber = 1; failureStoreNumber <= dsTuple.v2(); failureStoreNumber++) { + failureStores.add( + createIndexMetadata( + getDefaultFailureStoreName(dsTuple.v1(), failureStoreNumber, currentTime), + true, + settings, + replicas + ) + ); + } + allIndices.addAll(failureStores); + } + DataStream ds = DataStreamTestHelper.newInstance( dsTuple.v1(), backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()), dsTuple.v2(), null, - replicated + replicated, + null, + failureStores.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()) ); builder.put(ds); } diff --git 
a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index 1810b5cee76ec..b032be87a642b 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -28,193 +28,124 @@ /** * A helper that allows to create shard routing instances within tests, while not requiring to expose * different simplified constructors on the ShardRouting itself. + * + * Please do not add more `newShardRouting`, consider using a aSharRouting builder instead */ public class TestShardRouting { - public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state) { - return newShardRouting(new ShardId(index, IndexMetadata.INDEX_UUID_NA_VALUE, shardId), currentNodeId, primary, state); + public static Builder shardRoutingBuilder(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state) { + return shardRoutingBuilder(new ShardId(index, IndexMetadata.INDEX_UUID_NA_VALUE, shardId), currentNodeId, primary, state); } - public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) { - return newShardRouting(shardId, currentNodeId, primary, state, -1); + public static Builder shardRoutingBuilder(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) { + return new Builder(shardId, currentNodeId, primary, state); } - public static ShardRouting newShardRouting( - ShardId shardId, - String currentNodeId, - boolean primary, - ShardRoutingState state, - long expectedShardSize - ) { - assertNotEquals(ShardRoutingState.RELOCATING, state); - return new ShardRouting( - shardId, - currentNodeId, - null, - primary, - state, - buildRecoveryTarget(primary, state), - 
buildUnassignedInfo(state), - buildRelocationFailureInfo(state), - buildAllocationId(state), - expectedShardSize, - ShardRouting.Role.DEFAULT - ); - } + public static class Builder { - public static ShardRouting newShardRouting( - ShardId shardId, - String currentNodeId, - boolean primary, - ShardRoutingState state, - RecoverySource recoverySource - ) { - return new ShardRouting( - shardId, - currentNodeId, - null, - primary, - state, - recoverySource, - buildUnassignedInfo(state), - buildRelocationFailureInfo(state), - buildAllocationId(state), - -1, - ShardRouting.Role.DEFAULT - ); - } + private final ShardId shardId; + private String currentNodeId; + private String relocatingNodeId; + private boolean primary; + private ShardRoutingState state; + private RecoverySource recoverySource; + private UnassignedInfo unassignedInfo; + private RelocationFailureInfo relocationFailureInfo; + private AllocationId allocationId; + private Long expectedShardSize; + private ShardRouting.Role role; - public static ShardRouting newShardRouting( - ShardId shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state, - RecoverySource recoverySource - ) { - return new ShardRouting( - shardId, - currentNodeId, - relocatingNodeId, - primary, - state, - recoverySource, - buildUnassignedInfo(state), - buildRelocationFailureInfo(state), - buildAllocationId(state), - -1, - ShardRouting.Role.DEFAULT - ); - } + public Builder(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) { + this.shardId = shardId; + this.currentNodeId = currentNodeId; + this.primary = primary; + this.state = state; + } - public static ShardRouting newShardRouting( - String index, - int shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state - ) { - return newShardRouting( - new ShardId(index, IndexMetadata.INDEX_UUID_NA_VALUE, shardId), - currentNodeId, - relocatingNodeId, - primary, - state, - 
ShardRouting.Role.DEFAULT - ); - } + public Builder withCurrentNodeId(String currentNodeId) { + this.currentNodeId = currentNodeId; + return this; + } - public static ShardRouting newShardRouting( - String index, - int shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state, - ShardRouting.Role role - ) { - return newShardRouting( - new ShardId(index, IndexMetadata.INDEX_UUID_NA_VALUE, shardId), - currentNodeId, - relocatingNodeId, - primary, - state, - role - ); - } + public Builder withRelocatingNodeId(String relocatingNodeId) { + this.relocatingNodeId = relocatingNodeId; + return this; + } - public static ShardRouting newShardRouting( - ShardId shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state - ) { - return newShardRouting(shardId, currentNodeId, relocatingNodeId, primary, state, ShardRouting.Role.DEFAULT); - } + public Builder withPrimary(boolean primary) { + this.primary = primary; + return this; + } - public static ShardRouting newShardRouting( - ShardId shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state, - ShardRouting.Role role - ) { - return new ShardRouting( - shardId, - currentNodeId, - relocatingNodeId, - primary, - state, - buildRecoveryTarget(primary, state), - buildUnassignedInfo(state), - buildRelocationFailureInfo(state), - buildAllocationId(state), - -1, - role - ); + public Builder withState(ShardRoutingState state) { + this.state = state; + return this; + } + + public Builder withRecoverySource(RecoverySource recoverySource) { + this.recoverySource = recoverySource; + return this; + } + + public Builder withUnassignedInfo(UnassignedInfo unassignedInfo) { + this.unassignedInfo = unassignedInfo; + return this; + } + + public Builder withRelocationFailureInfo(RelocationFailureInfo relocationFailureInfo) { + this.relocationFailureInfo = relocationFailureInfo; + return this; + } + + public 
Builder withAllocationId(AllocationId allocationId) { + this.allocationId = allocationId; + return this; + } + + public Builder withExpectedShardSize(Long expectedShardSize) { + this.expectedShardSize = expectedShardSize; + return this; + } + + public Builder withRole(ShardRouting.Role role) { + this.role = role; + return this; + } + + public ShardRouting build() { + return new ShardRouting( + shardId, + currentNodeId, + relocatingNodeId, + primary, + state, + recoverySource != null ? recoverySource : buildRecoverySource(primary, state), + unassignedInfo != null ? unassignedInfo : buildUnassignedInfo(state), + relocationFailureInfo != null ? relocationFailureInfo : buildRelocationFailureInfo(state), + allocationId != null ? allocationId : buildAllocationId(state), + expectedShardSize != null ? expectedShardSize : ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, + role != null ? role : ShardRouting.Role.DEFAULT + ); + } } - public static ShardRouting newShardRouting( - String index, - int shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state, - AllocationId allocationId - ) { - return newShardRouting( - new ShardId(index, IndexMetadata.INDEX_UUID_NA_VALUE, shardId), - currentNodeId, - relocatingNodeId, - primary, - state, - allocationId - ); + public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state) { + return newShardRouting(new ShardId(index, IndexMetadata.INDEX_UUID_NA_VALUE, shardId), currentNodeId, primary, state); } - public static ShardRouting newShardRouting( - ShardId shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state, - AllocationId allocationId - ) { + public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) { + assertNotEquals(ShardRoutingState.RELOCATING, state); return new ShardRouting( shardId, currentNodeId, 
- relocatingNodeId, + null, primary, state, - buildRecoveryTarget(primary, state), + buildRecoverySource(primary, state), buildUnassignedInfo(state), buildRelocationFailureInfo(state), - allocationId, - -1, + buildAllocationId(state), + ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, ShardRouting.Role.DEFAULT ); } @@ -225,30 +156,14 @@ public static ShardRouting newShardRouting( String currentNodeId, String relocatingNodeId, boolean primary, - ShardRoutingState state, - UnassignedInfo unassignedInfo - ) { - return newShardRouting(index, shardId, currentNodeId, relocatingNodeId, primary, state, unassignedInfo, ShardRouting.Role.DEFAULT); - } - - public static ShardRouting newShardRouting( - String index, - int shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state, - UnassignedInfo unassignedInfo, - ShardRouting.Role role + ShardRoutingState state ) { return newShardRouting( new ShardId(index, IndexMetadata.INDEX_UUID_NA_VALUE, shardId), currentNodeId, relocatingNodeId, primary, - state, - unassignedInfo, - role + state ); } @@ -257,20 +172,7 @@ public static ShardRouting newShardRouting( String currentNodeId, String relocatingNodeId, boolean primary, - ShardRoutingState state, - UnassignedInfo unassignedInfo - ) { - return newShardRouting(shardId, currentNodeId, relocatingNodeId, primary, state, unassignedInfo, ShardRouting.Role.DEFAULT); - } - - public static ShardRouting newShardRouting( - ShardId shardId, - String currentNodeId, - String relocatingNodeId, - boolean primary, - ShardRoutingState state, - UnassignedInfo unassignedInfo, - ShardRouting.Role role + ShardRoutingState state ) { return new ShardRouting( shardId, @@ -278,20 +180,16 @@ public static ShardRouting newShardRouting( relocatingNodeId, primary, state, - buildRecoveryTarget(primary, state), - unassignedInfo, + buildRecoverySource(primary, state), + buildUnassignedInfo(state), buildRelocationFailureInfo(state), buildAllocationId(state), - -1, - role 
+ ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, + ShardRouting.Role.DEFAULT ); } - public static ShardRouting relocate(ShardRouting shardRouting, String relocatingNodeId, long expectedShardSize) { - return shardRouting.relocate(relocatingNodeId, expectedShardSize); - } - - public static RecoverySource buildRecoveryTarget(boolean primary, ShardRoutingState state) { + public static RecoverySource buildRecoverySource(boolean primary, ShardRoutingState state) { return switch (state) { case UNASSIGNED, INITIALIZING -> primary ? randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE, RecoverySource.ExistingStoreRecoverySource.INSTANCE) @@ -310,7 +208,7 @@ public static AllocationId buildAllocationId(ShardRoutingState state) { public static UnassignedInfo buildUnassignedInfo(ShardRoutingState state) { return switch (state) { - case UNASSIGNED, INITIALIZING -> randomUnassignedInfo("auto generated for test"); + case UNASSIGNED, INITIALIZING -> buildUnassignedInfo("auto generated for test"); case STARTED, RELOCATING -> null; }; } @@ -322,7 +220,7 @@ public static RelocationFailureInfo buildRelocationFailureInfo(ShardRoutingState }; } - public static UnassignedInfo randomUnassignedInfo(String message) { + public static UnassignedInfo buildUnassignedInfo(String message) { UnassignedInfo.Reason reason = randomFrom(UnassignedInfo.Reason.values()); String lastAllocatedNodeId = null; boolean delayed = false; @@ -347,7 +245,7 @@ public static UnassignedInfo randomUnassignedInfo(String message) { ); } - public static RecoverySource randomRecoverySource() { + public static RecoverySource buildRecoverySource() { return randomFrom( RecoverySource.EmptyStoreRecoverySource.INSTANCE, RecoverySource.ExistingStoreRecoverySource.INSTANCE, diff --git a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java b/test/framework/src/main/java/org/elasticsearch/common/bytes/ZeroBytesReference.java similarity index 100% rename from 
server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java rename to test/framework/src/main/java/org/elasticsearch/common/bytes/ZeroBytesReference.java diff --git a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReferenceTests.java b/test/framework/src/main/java/org/elasticsearch/common/bytes/ZeroBytesReferenceTests.java similarity index 79% rename from server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReferenceTests.java rename to test/framework/src/main/java/org/elasticsearch/common/bytes/ZeroBytesReferenceTests.java index f90cb870ea22a..c3f4510cd144e 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReferenceTests.java +++ b/test/framework/src/main/java/org/elasticsearch/common/bytes/ZeroBytesReferenceTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.common.bytes; -import java.io.IOException; - import static org.hamcrest.Matchers.containsString; public class ZeroBytesReferenceTests extends AbstractBytesReferenceTestCase { @@ -39,9 +37,11 @@ public void testSliceToBytesRef() { // ZeroBytesReference shifts offsets } - public void testWriteWithIterator() throws IOException { - AssertionError error = expectThrows(AssertionError.class, () -> super.testWriteWithIterator()); - assertThat(error.getMessage(), containsString("Internal pages from ZeroBytesReference must be zero")); + public void testWriteWithIterator() { + assertThat( + expectThrows(AssertionError.class, super::testWriteWithIterator).getMessage(), + containsString("Internal pages from ZeroBytesReference must be zero") + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTestUtils.java b/test/framework/src/main/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTestUtils.java new file mode 100644 index 0000000000000..a2f361494222a --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTestUtils.java @@ -0,0 +1,143 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.logging; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Property; +import org.elasticsearch.common.ReferenceDocs; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.core.CheckedRunnable; +import org.elasticsearch.core.Streams; +import org.elasticsearch.test.ESTestCase; +import org.junit.Assert; + +import java.io.ByteArrayInputStream; +import java.util.Base64; +import java.util.zip.GZIPInputStream; + +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +/** + * Utility for capturing and decoding the data logged by a {@link ChunkedLoggingStream}. + */ +public class ChunkedLoggingStreamTestUtils { + + private ChunkedLoggingStreamTestUtils() {/* no instances */} + + /** + * Test utility function which captures the logged output from a {@link ChunkedLoggingStream}, combines the chunks, Base64-decodes it + * and Gzip-decompresses it to retrieve the original data. + * + * @param captureLogger The logger whose output should be captured. + * @param level The log level for the data. + * @param prefix The prefix used by the logging stream. + * @param referenceDocs A link to the reference docs about the output. + * @param runnable The action which emits the logs. + * @return A {@link BytesReference} containing the captured data. 
+ */ + public static BytesReference getDecodedLoggedBody( + Logger captureLogger, + Level level, + String prefix, + ReferenceDocs referenceDocs, + CheckedRunnable runnable + ) { + final var loggedBody = getLoggedBody(captureLogger, level, prefix, referenceDocs, runnable); + + try ( + var bytesStreamOutput = new BytesStreamOutput(); + var byteArrayInputStream = new ByteArrayInputStream(Base64.getDecoder().decode(loggedBody)); + var gzipInputStream = new GZIPInputStream(byteArrayInputStream) + ) { + Streams.copy(gzipInputStream, bytesStreamOutput); + return bytesStreamOutput.bytes(); + } catch (Exception e) { + return ESTestCase.fail(e); + } + } + + static String getLoggedBody( + Logger captureLogger, + final Level level, + String prefix, + final ReferenceDocs referenceDocs, + CheckedRunnable runnable + ) { + class ChunkReadingAppender extends AbstractAppender { + final StringBuilder encodedResponseBuilder = new StringBuilder(); + int chunks; + boolean seenTotal; + + ChunkReadingAppender() { + super("mock", null, null, false, Property.EMPTY_ARRAY); + } + + @Override + public void append(LogEvent event) { + if (event.getLevel() != level) { + return; + } + if (event.getLoggerName().equals(captureLogger.getName()) == false) { + return; + } + Assert.assertFalse(seenTotal); + final var message = event.getMessage().getFormattedMessage(); + final var onePartPrefix = prefix + " (gzip compressed and base64-encoded; for details see " + referenceDocs + "): "; + final var partPrefix = prefix + " [part " + (chunks + 1) + "]: "; + if (message.startsWith(partPrefix)) { + chunks += 1; + final var chunk = message.substring(partPrefix.length()); + ESTestCase.assertThat(chunk.length(), lessThanOrEqualTo(ChunkedLoggingStream.CHUNK_SIZE)); + encodedResponseBuilder.append(chunk); + } else if (message.startsWith(onePartPrefix)) { + Assert.assertEquals(0, chunks); + chunks += 1; + final var chunk = message.substring(onePartPrefix.length()); + ESTestCase.assertThat(chunk.length(), 
lessThanOrEqualTo(ChunkedLoggingStream.CHUNK_SIZE)); + encodedResponseBuilder.append(chunk); + seenTotal = true; + } else { + Assert.assertEquals( + prefix + + " (gzip compressed, base64-encoded, and split into " + + chunks + + " parts on preceding log lines; for details see " + + referenceDocs + + ")", + message + ); + ESTestCase.assertThat(chunks, greaterThan(1)); + seenTotal = true; + } + } + } + + final var appender = new ChunkReadingAppender(); + try { + appender.start(); + Loggers.addAppender(captureLogger, appender); + runnable.run(); + } catch (Exception e) { + ESTestCase.fail(e); + } finally { + Loggers.removeAppender(captureLogger, appender); + appender.stop(); + } + + ESTestCase.assertThat(appender.chunks, greaterThan(0)); + Assert.assertTrue(appender.seenTotal); + + return appender.encodedResponseBuilder.toString(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/core/ReleasableRef.java b/test/framework/src/main/java/org/elasticsearch/core/ReleasableRef.java new file mode 100644 index 0000000000000..e5f933f57bdb1 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/core/ReleasableRef.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.core; + +import org.elasticsearch.transport.LeakTracker; + +import java.util.Objects; + +/** + * Adapter to use a {@link RefCounted} in a try-with-resources block. 
+ */ +public final class ReleasableRef implements Releasable { + + private final Releasable closeResource; + private final T resource; + + private ReleasableRef(T resource) { + this.resource = Objects.requireNonNull(resource); + this.closeResource = LeakTracker.wrap(Releasables.assertOnce(resource::decRef)); + } + + @Override + public void close() { + closeResource.close(); + } + + public static ReleasableRef of(T resource) { + return new ReleasableRef<>(resource); + } + + public T get() { + assert resource.hasReferences() : resource + " is closed"; + return resource; + } + + @Override + public String toString() { + return "ReleasableRef[" + resource + ']'; + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index 643beda11939c..7693b58bff595 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -422,6 +422,11 @@ public String indexName() { throw new UnsupportedOperationException(); } + @Override + public MappedFieldType.FieldExtractPreference fieldExtractPreference() { + return MappedFieldType.FieldExtractPreference.NONE; + } + @Override public SearchLookup lookup() { return mockContext().lookup(); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index f5ca39899ea65..05aee30799de2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -72,6 +72,8 @@ import java.util.stream.IntStream; import static java.util.stream.Collectors.toList; +import static 
org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.NONE; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; @@ -1035,8 +1037,8 @@ protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { public record SyntheticSourceExample( CheckedConsumer inputValue, - CheckedConsumer result, - CheckedConsumer blockLoaderResult, + CheckedConsumer expectedForSyntheticSource, + CheckedConsumer expectedForBlockLoader, CheckedConsumer mapping ) { public SyntheticSourceExample(Object inputValue, Object result, CheckedConsumer mapping) { @@ -1063,22 +1065,15 @@ private void buildInput(XContentBuilder b) throws IOException { private String expected() throws IOException { XContentBuilder b = JsonXContent.contentBuilder().startObject().field("field"); - result.accept(b); + expectedForSyntheticSource.accept(b); return Strings.toString(b.endObject()); } - private Object expectedParsed() throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, expected(), false).get("field"); - } - - private String expectedBlockLoader() throws IOException { + private Object expectedParsedForBlockLoader() throws IOException { XContentBuilder b = JsonXContent.contentBuilder().startObject().field("field"); - blockLoaderResult.accept(b); - return Strings.toString(b.endObject()); - } - - private Object expectedParsedBlockLoader() throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, expectedBlockLoader(), false).get("field"); + expectedForBlockLoader.accept(b); + String str = Strings.toString(b.endObject()); + return XContentHelper.convertToMap(JsonXContent.jsonXContent, str, false).get("field"); } } @@ -1099,6 +1094,10 @@ public interface SyntheticSourceSupport { protected abstract SyntheticSourceSupport 
syntheticSourceSupport(boolean ignoreMalformed); + protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed, boolean columnReader) { + return syntheticSourceSupport(ignoreMalformed); + } + public final void testSyntheticSource() throws IOException { boolean ignoreMalformed = supportsIgnoreMalformed() ? rarely() : false; assertSyntheticSource(syntheticSourceSupport(ignoreMalformed).example(5)); @@ -1239,76 +1238,109 @@ public final void testSyntheticEmptyListNoDocValuesLoader() throws IOException { assertNoDocValueLoader(b -> b.startArray("field").endArray()); } - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromColumnReader() throws IOException { + public final void testBlockLoaderFromColumnReader() throws IOException { testBlockLoader(false, true); } - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromRowStrideReader() throws IOException { + public final void testBlockLoaderFromRowStrideReader() throws IOException { testBlockLoader(false, false); } - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromColumnReaderWithSyntheticSource() throws IOException { + public final void testBlockLoaderFromColumnReaderWithSyntheticSource() throws IOException { testBlockLoader(true, true); } - // Removed 'final' to silence this test in GeoPointFieldMapperTests, which does not support synthetic source completely - // TextFieldMapperTests @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104152") - public void testBlockLoaderFromRowStrideReaderWithSyntheticSource() throws IOException { + public final void testBlockLoaderFromRowStrideReaderWithSyntheticSource() throws IOException { testBlockLoader(true, false); } - protected boolean supportsColumnAtATimeReader(MapperService mapper, 
MappedFieldType ft) { - return ft.hasDocValues(); + /** + * Get the configuration for testing block loaders with this field. In particular, not all fields can be loaded from doc-values. + * For most ESQL types the preference is to read from doc-values if they exist, so that is the default behaviour here. + * However, for spatial types, the doc-values involve precision loss, and therefor it is preferable to read from source. + * And for text fields, doc values are not easily convertable to original values either, so special cases exist. + */ + protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) { + MappedFieldType ft = mapper.fieldType(loaderFieldName); + return new BlockReaderSupport(ft.hasDocValues(), true, mapper, loaderFieldName); + } + + /** + * This record encapsulates the test configuration for testing block loaders (used in ES|QL). + * + * @param columnAtATimeReader true if the field supports column at a time readers (doc-values) + * @param syntheticSource true if the field supports synthetic source + * @param mapper the mapper service to use for testing + * @param loaderFieldName the field name to use for loading the field + */ + public record BlockReaderSupport(boolean columnAtATimeReader, boolean syntheticSource, MapperService mapper, String loaderFieldName) { + BlockReaderSupport(boolean columnAtATimeReader, MapperService mapper, String loaderFieldName) { + this(columnAtATimeReader, true, mapper, loaderFieldName); + } + + private BlockLoader getBlockLoader(boolean columnReader) { + SearchLookup searchLookup = new SearchLookup(mapper.mappingLookup().fieldTypesLookup()::get, null, null); + return mapper.fieldType(loaderFieldName).blockLoader(new MappedFieldType.BlockLoaderContext() { + @Override + public String indexName() { + throw new UnsupportedOperationException(); + } + + @Override + public MappedFieldType.FieldExtractPreference fieldExtractPreference() { + return columnReader ? 
DOC_VALUES : NONE; + } + + @Override + public SearchLookup lookup() { + return searchLookup; + } + + @Override + public Set sourcePaths(String name) { + return mapper.mappingLookup().sourcePaths(name); + } + + @Override + public String parentField(String field) { + return mapper.mappingLookup().parentField(field); + } + + @Override + public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { + return (FieldNamesFieldMapper.FieldNamesFieldType) mapper.fieldType(FieldNamesFieldMapper.NAME); + } + }); + } } private void testBlockLoader(boolean syntheticSource, boolean columnReader) throws IOException { // TODO if we're not using synthetic source use a different sort of example. Or something. - SyntheticSourceExample example = syntheticSourceSupport(false).example(5); + SyntheticSourceExample example = syntheticSourceSupport(false, columnReader).example(5); XContentBuilder mapping = syntheticSource ? syntheticSourceFieldMapping(example.mapping) : fieldMapping(example.mapping); MapperService mapper = createMapperService(mapping); - testBlockLoader(columnReader, example, mapper, "field"); + BlockReaderSupport blockReaderSupport = getSupportedReaders(mapper, "field"); + if (syntheticSource) { + // geo_point and point do not yet support synthetic source + assumeTrue( + "Synthetic source not completely supported for " + this.getClass().getSimpleName(), + blockReaderSupport.syntheticSource + ); + } + testBlockLoader(columnReader, example, blockReaderSupport); } - protected final void testBlockLoader(boolean columnReader, SyntheticSourceExample example, MapperService mapper, String loaderFieldName) + protected final void testBlockLoader(boolean columnReader, SyntheticSourceExample example, BlockReaderSupport blockReaderSupport) throws IOException { - SearchLookup searchLookup = new SearchLookup(mapper.mappingLookup().fieldTypesLookup()::get, null, null); - BlockLoader loader = mapper.fieldType(loaderFieldName).blockLoader(new MappedFieldType.BlockLoaderContext() { - 
@Override - public String indexName() { - throw new UnsupportedOperationException(); - } - - @Override - public SearchLookup lookup() { - return searchLookup; - } - - @Override - public Set sourcePaths(String name) { - return mapper.mappingLookup().sourcePaths(name); - } - - @Override - public String parentField(String field) { - return mapper.mappingLookup().parentField(field); - } - - @Override - public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { - return (FieldNamesFieldMapper.FieldNamesFieldType) mapper.fieldType(FieldNamesFieldMapper.NAME); - } - }); - Function valuesConvert = loadBlockExpected(mapper, loaderFieldName); + BlockLoader loader = blockReaderSupport.getBlockLoader(columnReader); + Function valuesConvert = loadBlockExpected(blockReaderSupport, columnReader); if (valuesConvert == null) { assertNull(loader); return; } try (Directory directory = newDirectory()) { RandomIndexWriter iw = new RandomIndexWriter(random(), directory); - LuceneDocument doc = mapper.documentMapper().parse(source(b -> { + LuceneDocument doc = blockReaderSupport.mapper.documentMapper().parse(source(b -> { b.field("field"); example.inputValue.accept(b); })).rootDoc(); @@ -1318,7 +1350,7 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { LeafReaderContext ctx = reader.leaves().get(0); TestBlock block; if (columnReader) { - if (supportsColumnAtATimeReader(mapper, mapper.fieldType(loaderFieldName))) { + if (blockReaderSupport.columnAtATimeReader) { block = (TestBlock) loader.columnAtATimeReader(ctx) .read(TestBlock.factory(ctx.reader().numDocs()), TestBlock.docs(0)); } else { @@ -1343,8 +1375,7 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { inBlock = valuesConvert.apply(inBlock); } } - // If we're reading from _source we expect the order to be preserved, otherwise it's jumbled. - Object expected = loader instanceof BlockSourceReader ? 
example.expectedParsed() : example.expectedParsedBlockLoader(); + Object expected = example.expectedParsedForBlockLoader(); if (List.of().equals(expected)) { assertThat(inBlock, nullValue()); return; @@ -1375,10 +1406,20 @@ protected Matcher blockItemMatcher(Object expected) { * How {@link MappedFieldType#blockLoader} should load values or {@code null} * if that method isn't supported by field being tested. */ - protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) { + protected Function loadBlockExpected() { return null; } + /** + * How {@link MappedFieldType#blockLoader} should load values or {@code null} + * if that method isn't supported by field being tested. + * This method should be overridden by fields that support different Block types + * when loading from doc values vs source. + */ + protected Function loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) { + return loadBlockExpected(); + } + public final void testEmptyDocumentNoDocValueLoader() throws IOException { assumeFalse("Field will add values even if no fields are supplied", addsValueWhenNotSupplied()); assertNoDocValueLoader(b -> {}); diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index ad515078fc1c5..ba6d7e441ef4a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -47,7 +47,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.collect.Iterators; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -98,6 +97,7 @@ import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -214,13 +214,9 @@ protected ReplicationGroup(final IndexMetadata indexMetadata) throws IOException } private ShardRouting createShardRouting(String nodeId, boolean isPrimary) { - return TestShardRouting.newShardRouting( - shardId, - nodeId, - isPrimary, - ShardRoutingState.INITIALIZING, + return shardRoutingBuilder(shardId, nodeId, isPrimary, ShardRoutingState.INITIALIZING).withRecoverySource( isPrimary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE - ); + ).build(); } protected EngineFactory getEngineFactory(ShardRouting routing) { @@ -349,13 +345,9 @@ protected synchronized void recoverPrimary(IndexShard primaryShard) { } public synchronized IndexShard addReplicaWithExistingPath(final ShardPath shardPath, final String nodeId) throws IOException { - final ShardRouting shardRouting = TestShardRouting.newShardRouting( - shardId, - nodeId, - false, - ShardRoutingState.INITIALIZING, - RecoverySource.PeerRecoverySource.INSTANCE - ); + final ShardRouting shardRouting = shardRoutingBuilder(shardId, nodeId, false, ShardRoutingState.INITIALIZING) + .withRecoverySource(RecoverySource.PeerRecoverySource.INSTANCE) + .build(); final IndexShard newReplica = newShard( shardRouting, diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index fb222f67eef69..1c7fb46edc181 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -101,7 +101,7 @@ import java.util.function.LongSupplier; import java.util.stream.Collectors; -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -251,13 +251,9 @@ protected IndexShard newShard( final RecoverySource recoverySource = primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE; - final ShardRouting shardRouting = TestShardRouting.newShardRouting( - shardId, - randomAlphaOfLength(10), - primary, - ShardRoutingState.INITIALIZING, - recoverySource - ); + final ShardRouting shardRouting = shardRoutingBuilder(shardId, randomAlphaOfLength(10), primary, ShardRoutingState.INITIALIZING) + .withRecoverySource(recoverySource) + .build(); return newShard(shardRouting, settings, engineFactory, listeners); } @@ -311,13 +307,9 @@ protected IndexShard newShard( * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardId shardId, boolean primary, IndexingOperationListener... listeners) throws IOException { - ShardRouting shardRouting = TestShardRouting.newShardRouting( - shardId, - randomAlphaOfLength(5), - primary, - ShardRoutingState.INITIALIZING, - primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE - ); + ShardRouting shardRouting = shardRoutingBuilder(shardId, randomAlphaOfLength(5), primary, ShardRoutingState.INITIALIZING) + .withRecoverySource(primary ? 
RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE) + .build(); return newShard(shardRouting, Settings.EMPTY, new InternalEngineFactory(), listeners); } @@ -355,13 +347,9 @@ protected IndexShard newShard( @Nullable CheckedFunction readerWrapper, GlobalCheckpointSyncer globalCheckpointSyncer ) throws IOException { - ShardRouting shardRouting = TestShardRouting.newShardRouting( - shardId, - nodeId, - primary, - ShardRoutingState.INITIALIZING, + ShardRouting shardRouting = shardRoutingBuilder(shardId, nodeId, primary, ShardRoutingState.INITIALIZING).withRecoverySource( primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE - ); + ).build(); return newShard( shardRouting, indexMetadata, @@ -811,7 +799,7 @@ protected final void recoverUnstartedReplica( } replica.prepareForIndexRecovery(); final RecoveryTarget recoveryTarget = targetSupplier.apply(replica, pNode); - final long startingSeqNo = recoveryTarget.indexShard().recoverLocallyUpToGlobalCheckpoint(); + final long startingSeqNo = recoverLocallyUpToGlobalCheckpoint(recoveryTarget.indexShard()); final StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest( logger, rNode, @@ -891,14 +879,12 @@ protected void startReplicaAfterRecovery( */ protected void promoteReplica(IndexShard replica, Set inSyncIds, IndexShardRoutingTable routingTable) throws IOException { assertThat(inSyncIds, contains(replica.routingEntry().allocationId().getId())); - final ShardRouting routingEntry = newShardRouting( + final ShardRouting routingEntry = shardRoutingBuilder( replica.routingEntry().shardId(), replica.routingEntry().currentNodeId(), - null, true, - ShardRoutingState.STARTED, - replica.routingEntry().allocationId() - ); + ShardRoutingState.STARTED + ).withAllocationId(replica.routingEntry().allocationId()).build(); final IndexShardRoutingTable newRoutingTable = new 
IndexShardRoutingTable.Builder(routingTable).removeShard( routingTable.primaryShard() @@ -1083,7 +1069,9 @@ protected void recoverShardFromSnapshot(final IndexShard shard, final Snapshot s version, indexId ); - final ShardRouting shardRouting = newShardRouting(shardId, node.getId(), true, ShardRoutingState.INITIALIZING, recoverySource); + final ShardRouting shardRouting = shardRoutingBuilder(shardId, node.getId(), true, ShardRoutingState.INITIALIZING) + .withRecoverySource(recoverySource) + .build(); shard.markAsRecovering("from snapshot", new RecoveryState(shardRouting, node, null)); final PlainActionFuture future = new PlainActionFuture<>(); repository.restoreShard(shard.store(), snapshot.getSnapshotId(), indexId, shard.shardId(), shard.recoveryState(), future); @@ -1158,4 +1146,8 @@ public static Engine.Warmer createTestWarmer(IndexSettings indexSettings) { } }; } + + public static long recoverLocallyUpToGlobalCheckpoint(IndexShard indexShard) { + return PlainActionFuture.get(indexShard::recoverLocallyUpToGlobalCheckpoint, 10, TimeUnit.SECONDS); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java index b12bcd8b55880..a5ace3e357f90 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java @@ -11,9 +11,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import 
org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -466,7 +466,7 @@ private static void createSnapshotThatCanBeUsedDuringRecovery(String indexName) }, 60, TimeUnit.SECONDS); // Force merge to make sure that the resulting snapshot would contain the same index files as the safe commit - ForceMergeResponse forceMergeResponse = client().admin().indices().prepareForceMerge(indexName).setFlush(randomBoolean()).get(); + BroadcastResponse forceMergeResponse = client().admin().indices().prepareForceMerge(indexName).setFlush(randomBoolean()).get(); assertThat(forceMergeResponse.getTotalShards(), equalTo(forceMergeResponse.getSuccessfulShards())); // create repo diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index 8d44c37fcd9f1..01e21c929e654 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -253,7 +253,7 @@ public void testReadBlobWithReadTimeouts() { Exception exception = expectThrows( unresponsiveExceptionType(), - () -> Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_blob_unresponsive")) + () -> Streams.readFully(blobContainer.readBlob(randomFiniteRetryingPurpose(), "read_blob_unresponsive")) ); assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); assertThat(exception.getCause(), instanceOf(SocketTimeoutException.class)); @@ -270,8 +270,8 @@ public void testReadBlobWithReadTimeouts() { 
exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete") - : blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete", position, length) + ? blobContainer.readBlob(randomFiniteRetryingPurpose(), "read_blob_incomplete") + : blobContainer.readBlob(randomFiniteRetryingPurpose(), "read_blob_incomplete", position, length) ) { Streams.readFully(stream); } @@ -294,6 +294,10 @@ protected OperationPurpose randomRetryingPurpose() { return randomPurpose(); } + protected OperationPurpose randomFiniteRetryingPurpose() { + return randomPurpose(); + } + public void testReadBlobWithNoHttpResponse() { final TimeValue readTimeout = TimeValue.timeValueMillis(between(100, 200)); final BlobContainer blobContainer = createBlobContainer(randomInt(5), readTimeout, null, null); @@ -303,9 +307,9 @@ public void testReadBlobWithNoHttpResponse() { Exception exception = expectThrows(unresponsiveExceptionType(), () -> { if (randomBoolean()) { - Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_blob_no_response")); + Streams.readFully(blobContainer.readBlob(randomFiniteRetryingPurpose(), "read_blob_no_response")); } else { - Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_blob_no_response", 0, 1)); + Streams.readFully(blobContainer.readBlob(randomFiniteRetryingPurpose(), "read_blob_no_response", 0, 1)); } }); assertThat( @@ -328,8 +332,8 @@ public void testReadBlobWithPrematureConnectionClose() { final Exception exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete", 0, 1) - : blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete") + ? 
blobContainer.readBlob(randomFiniteRetryingPurpose(), "read_blob_incomplete", 0, 1) + : blobContainer.readBlob(randomFiniteRetryingPurpose(), "read_blob_incomplete") ) { Streams.readFully(stream); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index 1e66dd061d9b5..b6415eea7db2c 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -16,7 +16,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.network.InetAddresses; @@ -164,7 +164,7 @@ public final void testSnapshotWithLargeSegmentFiles() throws Exception { } flushAndRefresh(index); - ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); @@ -191,7 +191,7 @@ public void testRequestStats() throws Exception { } flushAndRefresh(index); - ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = 
client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); @@ -254,7 +254,7 @@ protected static String serverUrl() { /** * Consumes and closes the given {@link InputStream} */ - protected static void drainInputStream(final InputStream inputStream) throws IOException { + public static void drainInputStream(final InputStream inputStream) throws IOException { while (inputStream.read(BUFFER) >= 0) ; } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java index 02d3d133de185..e697fbb378773 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java @@ -8,18 +8,12 @@ package org.elasticsearch.search.aggregations; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentType; -import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -27,9 +21,6 @@ import java.util.function.Supplier; import static java.util.Collections.emptyMap; -import static 
java.util.Collections.singletonMap; -import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public abstract class InternalSingleBucketAggregationTestCase extends InternalAggregationTestCase { @@ -115,38 +106,4 @@ protected final void assertReduced(T reduced, List inputs) { } extraAssertReduced(reduced, inputs); } - - @Override - protected void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) throws IOException { - assertTrue(parsedAggregation instanceof ParsedSingleBucketAggregation); - ParsedSingleBucketAggregation parsed = (ParsedSingleBucketAggregation) parsedAggregation; - - assertEquals(aggregation.getDocCount(), parsed.getDocCount()); - InternalAggregations aggregations = aggregation.getAggregations(); - Map expectedAggregations = new HashMap<>(); - int expectedNumberOfAggregations = 0; - for (Aggregation expectedAggregation : aggregations) { - // since we shuffle xContent, we cannot rely on the order of the original inner aggregations for comparison - assertTrue(expectedAggregation instanceof InternalAggregation); - expectedAggregations.put(expectedAggregation.getName(), expectedAggregation); - expectedNumberOfAggregations++; - } - int parsedNumberOfAggregations = 0; - for (Aggregation parsedAgg : parsed.getAggregations()) { - assertTrue(parsedAgg instanceof ParsedAggregation); - assertTrue(expectedAggregations.keySet().contains(parsedAgg.getName())); - Aggregation expectedInternalAggregation = expectedAggregations.get(parsedAgg.getName()); - final XContentType xContentType = randomFrom(XContentType.values()); - final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); - BytesReference expectedBytes = toXContent(expectedInternalAggregation, xContentType, params, false); - BytesReference actualBytes = toXContent(parsedAgg, xContentType, params, false); - 
assertToXContentEquivalent(expectedBytes, actualBytes, xContentType); - parsedNumberOfAggregations++; - } - assertEquals(expectedNumberOfAggregations, parsedNumberOfAggregations); - Class parsedClass = implementationClass(); - assertTrue(parsedClass != null && parsedClass.isInstance(parsedAggregation)); - } - - protected abstract Class implementationClass(); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java index 1dd4cb29fbd9a..0d94645e2a924 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java @@ -118,11 +118,6 @@ protected void assertReduced(T reduced, List inputs) { } } - @Override - protected Class implementationClass() { - return ParsedGeoGrid.class; - } - @Override protected T mutateInstance(T instance) { String name = instance.getName(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java index a9f1ab7780f7f..526c2104e52ae 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java @@ -73,7 +73,7 @@ protected final Map highlight(MapperService mapperServic Map> storedFields = storedFields(processor.storedFieldsSpec(), doc); Source source = Source.fromBytes(doc.source()); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext( - new SearchHit(0, "id"), + SearchHit.unpooled(0, "id"), ir.leaves().get(0), 0, storedFields, diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/MetricRecorder.java 
b/test/framework/src/main/java/org/elasticsearch/telemetry/MetricRecorder.java index aa14d0067b68e..194a1a317742d 100644 --- a/test/framework/src/main/java/org/elasticsearch/telemetry/MetricRecorder.java +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/MetricRecorder.java @@ -11,6 +11,7 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.telemetry.metric.Instrument; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -106,6 +107,12 @@ public List getMeasurements(InstrumentType instrumentType, String n return metrics.get(instrumentType).called.getOrDefault(Objects.requireNonNull(name), Collections.emptyList()); } + public ArrayList getRegisteredMetrics(InstrumentType instrumentType) { + ArrayList registeredMetrics = new ArrayList<>(); + metrics.get(instrumentType).instruments.forEach((name, registration) -> { registeredMetrics.add(name); }); + return registeredMetrics; + } + /** * Get the {@link Registration} for a given elasticsearch {@link Instrument}. 
*/ diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java b/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java index e237f6c9bbb4b..a4c73634dc102 100644 --- a/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java @@ -15,6 +15,7 @@ import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.telemetry.tracing.Tracer; +import java.util.ArrayList; import java.util.List; /** @@ -41,6 +42,10 @@ public List getLongCounterMeasurement(String name) { return meter.getRecorder().getMeasurements(InstrumentType.LONG_COUNTER, name); } + public List getLongAsyncCounterMeasurement(String name) { + return meter.getRecorder().getMeasurements(InstrumentType.LONG_ASYNC_COUNTER, name); + } + public List getDoubleUpDownCounterMeasurement(String name) { return meter.getRecorder().getMeasurements(InstrumentType.DOUBLE_UP_DOWN_COUNTER, name); } @@ -65,10 +70,18 @@ public List getLongHistogramMeasurement(String name) { return meter.getRecorder().getMeasurements(InstrumentType.LONG_HISTOGRAM, name); } + public void collect() { + meter.getRecorder().collect(); + } + public void resetMeter() { meter.getRecorder().resetCalls(); } + public ArrayList getRegisteredMetrics(InstrumentType instrumentType) { + return meter.getRecorder().getRegisteredMetrics(instrumentType); + } + @Override public TelemetryProvider getTelemetryProvider(Settings settings) { return new TelemetryProvider() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java index 9d151e690b071..5dc707e94bdd7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -117,7 +116,7 @@ protected void cancelSearch(String action) { TaskInfo searchTask = listTasksResponse.getTasks().get(0); logger.info("Cancelling search"); - CancelTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setTargetTaskId(searchTask.taskId()).get(); + ListTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setTargetTaskId(searchTask.taskId()).get(); assertThat(cancelTasksResponse.getTasks(), hasSize(1)); assertThat(cancelTasksResponse.getTasks().get(0).taskId(), equalTo(searchTask.taskId())); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index 770c56f9c5952..4df1e745f3bf4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -32,7 +32,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public abstract class AbstractXContentTestCase extends ESTestCase { - protected static final int NUMBER_OF_TEST_RUNS = 20; + public static final int NUMBER_OF_TEST_RUNS = 20; public static XContentTester xContentTester( CheckedBiFunction createParser, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 175594ac8210f..65b28ad874431 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -34,10 +34,7 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; @@ -57,6 +54,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.internal.AdminClient; @@ -1478,9 +1476,9 @@ protected final DocWriteResponse index(String index, String id, String source) { * * @see #waitForRelocation() */ - protected final RefreshResponse refresh(String... indices) { + protected final BroadcastResponse refresh(String... 
indices) { waitForRelocation(); - RefreshResponse actionGet = indicesAdmin().prepareRefresh(indices) + BroadcastResponse actionGet = indicesAdmin().prepareRefresh(indices) .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_HIDDEN_FORBID_CLOSED) .get(); assertNoFailures(actionGet); @@ -1498,9 +1496,9 @@ protected final void flushAndRefresh(String... indices) { /** * Flush some or all indices in the cluster. */ - protected final FlushResponse flush(String... indices) { + protected final BroadcastResponse flush(String... indices) { waitForRelocation(); - FlushResponse actionGet = indicesAdmin().prepareFlush(indices).get(); + BroadcastResponse actionGet = indicesAdmin().prepareFlush(indices).get(); for (DefaultShardOperationFailedException failure : actionGet.getShardFailures()) { assertThat("unexpected flush failure " + failure.reason(), failure.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); } @@ -1510,9 +1508,9 @@ protected final FlushResponse flush(String... indices) { /** * Waits for all relocations and force merge all indices in the cluster to 1 segment. 
*/ - protected ForceMergeResponse forceMerge() { + protected BroadcastResponse forceMerge() { waitForRelocation(); - ForceMergeResponse actionGet = indicesAdmin().prepareForceMerge().setMaxNumSegments(1).get(); + BroadcastResponse actionGet = indicesAdmin().prepareForceMerge().setMaxNumSegments(1).get(); assertNoFailures(actionGet); return actionGet; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index c072f5643a5cd..ab4fd807216e5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -40,7 +40,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.bootstrap.BootstrapForTesting; @@ -122,6 +122,8 @@ import org.elasticsearch.xcontent.XContentParser.Token; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matcher; +import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.After; import org.junit.AfterClass; @@ -2109,6 +2111,18 @@ protected static boolean isTurkishLocale() { || Locale.getDefault().getLanguage().equals(new Locale("az").getLanguage()); } + /* + * Assert.assertThat (inherited from LuceneTestCase superclass) has been deprecated. + * So make sure that all assertThat references use the non-deprecated version. 
+ */ + public static void assertThat(T actual, Matcher matcher) { + MatcherAssert.assertThat(actual, matcher); + } + + public static void assertThat(String reason, T actual, Matcher matcher) { + MatcherAssert.assertThat(reason, actual, matcher); + } + public static T fail(Throwable t, String msg, Object... args) { throw new AssertionError(org.elasticsearch.common.Strings.format(msg, args), t); } @@ -2131,7 +2145,7 @@ public static T expectThrows(Class expectedType, Action ); } - public static T expectThrows(Class expectedType, ActionRequestBuilder builder) { + public static T expectThrows(Class expectedType, RequestBuilder builder) { return expectThrows( expectedType, "Expected exception " + expectedType.getSimpleName() + " but no exception was thrown", diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 343f2b5bbff92..d93670be26b07 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -8,12 +8,9 @@ package org.elasticsearch.test; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -21,16 +18,12 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; -import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.XContentParserUtils; import 
org.elasticsearch.index.mapper.DateFieldMapper.Resolution; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.Aggregation; @@ -42,112 +35,10 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.ParsedComposite; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.filter.ParsedFilter; -import org.elasticsearch.search.aggregations.bucket.filter.ParsedFilters; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid; -import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoTileGrid; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.global.ParsedGlobal; -import 
org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram; -import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram; -import org.elasticsearch.search.aggregations.bucket.histogram.ParsedVariableWidthHistogram; -import org.elasticsearch.search.aggregations.bucket.histogram.VariableWidthHistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.missing.ParsedMissing; -import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.nested.ParsedNested; -import org.elasticsearch.search.aggregations.bucket.nested.ParsedReverseNested; -import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.range.GeoDistanceAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.range.IpRangeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.range.ParsedBinaryRange; -import org.elasticsearch.search.aggregations.bucket.range.ParsedDateRange; -import org.elasticsearch.search.aggregations.bucket.range.ParsedGeoDistance; -import org.elasticsearch.search.aggregations.bucket.range.ParsedRange; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler; -import org.elasticsearch.search.aggregations.bucket.sampler.ParsedSampler; -import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; -import org.elasticsearch.search.aggregations.bucket.terms.LongRareTerms; -import 
org.elasticsearch.search.aggregations.bucket.terms.LongTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongRareTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantLongTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantStringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringRareTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.SignificantLongTerms; -import org.elasticsearch.search.aggregations.bucket.terms.SignificantStringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.StringRareTerms; -import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; -import 
org.elasticsearch.search.aggregations.metrics.ParsedAvg; -import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; -import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedMax; -import org.elasticsearch.search.aggregations.metrics.ParsedMedianAbsoluteDeviation; -import org.elasticsearch.search.aggregations.metrics.ParsedMin; -import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.ParsedStats; -import org.elasticsearch.search.aggregations.metrics.ParsedSum; -import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; -import org.elasticsearch.search.aggregations.metrics.ParsedWeightedAvg; -import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.TopHits; -import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; -import 
org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.ParsedBucketMetricValue; -import org.elasticsearch.search.aggregations.pipeline.ParsedExtendedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.ParsedPercentilesBucket; -import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; -import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.SamplingContext; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; @@ -156,17 +47,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.stream.Collectors; import static java.util.Collections.emptyList; -import static java.util.Collections.singletonMap; -import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.search.aggregations.InternalMultiBucketAggregation.countInnerBucket; -import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; -import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -244,79 +128,6 @@ public AggregationReduceContext forFinalReduction() { @SuppressWarnings("this-escape") private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(getNamedWriteables()); - @SuppressWarnings("this-escape") - private final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(getNamedXContents()); - - private static final List namedXContents; - static { - Map> map = new HashMap<>(); - map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c)); - map.put(InternalHDRPercentiles.NAME, (p, c) -> ParsedHDRPercentiles.fromXContent(p, (String) c)); - map.put(InternalHDRPercentileRanks.NAME, (p, c) -> ParsedHDRPercentileRanks.fromXContent(p, (String) c)); - map.put(InternalTDigestPercentiles.NAME, (p, c) -> ParsedTDigestPercentiles.fromXContent(p, (String) c)); - map.put(InternalTDigestPercentileRanks.NAME, (p, c) -> ParsedTDigestPercentileRanks.fromXContent(p, (String) c)); - map.put(PercentilesBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)); - map.put(MedianAbsoluteDeviationAggregationBuilder.NAME, (p, c) -> ParsedMedianAbsoluteDeviation.fromXContent(p, (String) c)); - map.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)); - map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)); - map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)); - map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)); - map.put(WeightedAvgAggregationBuilder.NAME, (p, c) -> ParsedWeightedAvg.fromXContent(p, (String) c)); - map.put(ValueCountAggregationBuilder.NAME, (p, c) -> 
ParsedValueCount.fromXContent(p, (String) c)); - map.put(InternalSimpleValue.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c)); - map.put(InternalBucketMetricValue.NAME, (p, c) -> ParsedBucketMetricValue.fromXContent(p, (String) c)); - map.put(StatsAggregationBuilder.NAME, (p, c) -> ParsedStats.fromXContent(p, (String) c)); - map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c)); - map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c)); - map.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedExtendedStatsBucket.fromXContent(p, (String) c)); - map.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c)); - map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c)); - map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c)); - map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)); - map.put(VariableWidthHistogramAggregationBuilder.NAME, (p, c) -> ParsedVariableWidthHistogram.fromXContent(p, (String) c)); - map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)); - map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)); - map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)); - map.put(LongRareTerms.NAME, (p, c) -> ParsedLongRareTerms.fromXContent(p, (String) c)); - map.put(StringRareTerms.NAME, (p, c) -> ParsedStringRareTerms.fromXContent(p, (String) c)); - map.put(MissingAggregationBuilder.NAME, (p, c) -> ParsedMissing.fromXContent(p, (String) c)); - map.put(NestedAggregationBuilder.NAME, (p, c) -> ParsedNested.fromXContent(p, (String) c)); - map.put(ReverseNestedAggregationBuilder.NAME, (p, c) -> ParsedReverseNested.fromXContent(p, (String) c)); - 
map.put(GlobalAggregationBuilder.NAME, (p, c) -> ParsedGlobal.fromXContent(p, (String) c)); - map.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c)); - map.put(InternalSampler.PARSER_NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c)); - map.put(GeoHashGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c)); - map.put(GeoTileGridAggregationBuilder.NAME, (p, c) -> ParsedGeoTileGrid.fromXContent(p, (String) c)); - map.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c)); - map.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c)); - map.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c)); - map.put(FiltersAggregationBuilder.NAME, (p, c) -> ParsedFilters.fromXContent(p, (String) c)); - map.put(SignificantLongTerms.NAME, (p, c) -> ParsedSignificantLongTerms.fromXContent(p, (String) c)); - map.put(SignificantStringTerms.NAME, (p, c) -> ParsedSignificantStringTerms.fromXContent(p, (String) c)); - map.put(ScriptedMetricAggregationBuilder.NAME, (p, c) -> ParsedScriptedMetric.fromXContent(p, (String) c)); - map.put(IpRangeAggregationBuilder.NAME, (p, c) -> ParsedBinaryRange.fromXContent(p, (String) c)); - map.put(TopHitsAggregationBuilder.NAME, (p, c) -> ParsedTopHits.fromXContent(p, (String) c)); - map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c)); - - namedXContents = map.entrySet() - .stream() - .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) - .collect(Collectors.toList()); - } - - public static List getDefaultNamedXContents() { - return namedXContents; - } - - protected List getNamedXContents() { - return namedXContents; - } - - @Override - protected NamedXContentRegistry xContentRegistry() { - return namedXContentRegistry; - } - @Override protected final 
NamedWriteableRegistry getNamedWriteableRegistry() { return namedWriteableRegistry; @@ -540,130 +351,6 @@ public T createTestInstanceForXContent() { return createTestInstance(); } - public final void testFromXContent() throws IOException { - final T aggregation = createTestInstanceForXContent(); - final ParsedAggregation parsedAggregation = parseAndAssert(aggregation, randomBoolean(), false); - assertFromXContent(aggregation, parsedAggregation); - } - - public final void testFromXContentWithRandomFields() throws IOException { - final T aggregation = createTestInstanceForXContent(); - final ParsedAggregation parsedAggregation = parseAndAssert(aggregation, randomBoolean(), true); - assertFromXContent(aggregation, parsedAggregation); - } - - protected abstract void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) throws IOException; - - /** - * Calls {@link ToXContent#toXContent} on many threads and verifies that - * they produce the same result. Async search sometimes does this to - * aggregation responses and, in general, we think it's reasonable for - * everything that can convert itself to json to be able to do so - * concurrently. - */ - public final void testConcurrentToXContent() throws IOException, InterruptedException, ExecutionException { - T testInstance = createTestInstanceForXContent(); - ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); - XContentType xContentType = randomFrom(XContentType.values()); - boolean humanReadable = randomBoolean(); - BytesRef firstTimeBytes = toXContent(testInstance, xContentType, params, humanReadable).toBytesRef(); - - /* - * 500 rounds seems to consistently reproduce the issue on Nik's - * laptop. Larger numbers are going to be slower but more likely - * to reproduce the issue. 
- */ - int rounds = scaledRandomIntBetween(300, 5000); - concurrentTest(() -> { - try { - for (int r = 0; r < rounds; r++) { - assertEquals(firstTimeBytes, toXContent(testInstance, xContentType, params, humanReadable).toBytesRef()); - } - } catch (IOException e) { - throw new AssertionError(e); - } - }); - } - - @SuppressWarnings("unchecked") - protected

    P parseAndAssert( - final InternalAggregation aggregation, - final boolean shuffled, - final boolean addRandomFields - ) throws IOException { - - final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); - final XContentType xContentType = randomFrom(XContentType.values()); - final boolean humanReadable = randomBoolean(); - - final BytesReference originalBytes; - try { - if (shuffled) { - originalBytes = toShuffledXContent(aggregation, xContentType, params, humanReadable); - } else { - originalBytes = toXContent(aggregation, xContentType, params, humanReadable); - } - } catch (IOException e) { - throw new IOException("error converting " + aggregation, e); - } - BytesReference mutated; - if (addRandomFields) { - /* - * - we don't add to the root object because it should only contain - * the named aggregation to test - we don't want to insert into the - * "meta" object, because we pass on everything we find there - * - * - we don't want to directly insert anything random into "buckets" - * objects, they are used with "keyed" aggregations and contain - * named bucket objects. Any new named object on this level should - * also be a bucket and be parsed as such. - * - * we also exclude top_hits that contain SearchHits, as all unknown fields - * on a root level of SearchHit are interpreted as meta-fields and will be kept. 
- */ - Predicate basicExcludes = path -> path.isEmpty() - || path.endsWith(Aggregation.CommonFields.META.getPreferredName()) - || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName()) - || path.contains("top_hits"); - Predicate excludes = basicExcludes.or(excludePathsFromXContentInsertion()); - mutated = insertRandomFields(xContentType, originalBytes, excludes, random()); - } else { - mutated = originalBytes; - } - - SetOnce parsedAggregation = new SetOnce<>(); - try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, parsedAggregation::set); - - assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); - - Aggregation agg = parsedAggregation.get(); - assertEquals(aggregation.getName(), agg.getName()); - assertEquals(aggregation.getMetadata(), agg.getMetadata()); - - assertTrue(agg instanceof ParsedAggregation); - assertEquals(aggregation.getType(), agg.getType()); - - BytesReference parsedBytes = toXContent(agg, xContentType, params, humanReadable); - assertToXContentEquivalent(originalBytes, parsedBytes, xContentType); - - return (P) agg; - } - - } - - /** - * Overwrite this in your test if other than the basic xContent paths should be excluded during insertion of random fields - */ - protected Predicate excludePathsFromXContentInsertion() { - return path -> false; - } - /** * A random {@link DocValueFormat} that can be used in aggregations which * compute numbers. 
@@ -740,45 +427,4 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ZERO; } } - - protected static class ParsedTopHits extends ParsedAggregation implements TopHits { - - private SearchHits searchHits; - - @Override - public String getType() { - return TopHitsAggregationBuilder.NAME; - } - - @Override - public SearchHits getHits() { - return searchHits; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return ChunkedToXContent.wrapAsToXContent(searchHits).toXContent(builder, params); - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedTopHits.class.getSimpleName(), - true, - ParsedTopHits::new - ); - - static { - declareAggregationFields(PARSER); - PARSER.declareObject( - (topHit, searchHits) -> topHit.searchHits = searchHits, - (parser, context) -> SearchHits.fromXContent(parser), - new ParseField(SearchHits.Fields.HITS) - ); - } - - public static ParsedTopHits fromXContent(XContentParser parser, String name) throws IOException { - ParsedTopHits aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; - } - } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java index 92a6bab9d427c..897a3fec795e1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java @@ -17,18 +17,13 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import 
org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.SamplingContext; -import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -100,18 +95,6 @@ protected final T createTestInstance(String name, Map metadata) protected abstract T createTestInstance(String name, Map metadata, InternalAggregations aggregations); - /** - * The parsed version used by the deprecated high level rest client or - * {@code null} if the deprecated high level rest client isn't supported - * by this agg. - */ - protected abstract Class> implementationClass(); - - @Override - protected final void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) { - assertMultiBucketsAggregations(aggregation, parsedAggregation, false); - } - @Override public final T createTestInstanceForXContent() { return createTestInstanceForXContent(randomAlphaOfLength(5), createTestMetadata(), createSubAggregations()); @@ -121,18 +104,6 @@ protected T createTestInstanceForXContent(String name, Map metad return createTestInstance(name, metadata, subAggs); } - public void testIterators() throws IOException { - final T aggregation = createTestInstanceForXContent(); - assertMultiBucketsAggregations(aggregation, parseAndAssert(aggregation, false, false), true); - } - - @Override - protected

    P parseAndAssert(InternalAggregation aggregation, boolean shuffled, boolean addRandomFields) - throws IOException { - assumeFalse("deprecated high level rest client not supported", implementationClass() == null); - return super.parseAndAssert(aggregation, shuffled, addRandomFields); - } - @Override protected void assertSampled(T sampled, T reduced, SamplingContext samplingContext) { assertBucketCountsScaled(sampled.getBuckets(), reduced.getBuckets(), samplingContext); @@ -151,89 +122,6 @@ protected void assertBucketCountsScaled( } } - private void assertMultiBucketsAggregations(Aggregation expected, Aggregation actual, boolean checkOrder) { - assertTrue(expected instanceof MultiBucketsAggregation); - MultiBucketsAggregation expectedMultiBucketsAggregation = (MultiBucketsAggregation) expected; - - assertTrue(actual instanceof MultiBucketsAggregation); - MultiBucketsAggregation actualMultiBucketsAggregation = (MultiBucketsAggregation) actual; - - assertMultiBucketsAggregation(expectedMultiBucketsAggregation, actualMultiBucketsAggregation, checkOrder); - - List expectedBuckets = expectedMultiBucketsAggregation.getBuckets(); - List actualBuckets = actualMultiBucketsAggregation.getBuckets(); - assertEquals(expectedBuckets.size(), actualBuckets.size()); - - if (checkOrder) { - Iterator expectedIt = expectedBuckets.iterator(); - Iterator actualIt = actualBuckets.iterator(); - while (expectedIt.hasNext()) { - MultiBucketsAggregation.Bucket expectedBucket = expectedIt.next(); - MultiBucketsAggregation.Bucket actualBucket = actualIt.next(); - assertBucket(expectedBucket, actualBucket, true); - } - } else { - for (MultiBucketsAggregation.Bucket expectedBucket : expectedBuckets) { - final Object expectedKey = expectedBucket.getKey(); - boolean found = false; - - for (MultiBucketsAggregation.Bucket actualBucket : actualBuckets) { - final Object actualKey = actualBucket.getKey(); - if ((actualKey != null && actualKey.equals(expectedKey)) || (actualKey == null && expectedKey 
== null)) { - found = true; - assertBucket(expectedBucket, actualBucket, false); - break; - } - } - assertTrue("Failed to find bucket with key [" + expectedBucket.getKey() + "]", found); - } - } - } - - protected void assertMultiBucketsAggregation(MultiBucketsAggregation expected, MultiBucketsAggregation actual, boolean checkOrder) { - Class> parsedClass = implementationClass(); - assertNotNull("Parsed aggregation class must not be null", parsedClass); - assertTrue( - "Unexpected parsed class, expected instance of: " + actual + ", but was: " + parsedClass, - parsedClass.isInstance(actual) - ); - - assertTrue(expected instanceof InternalAggregation); - assertEquals(expected.getName(), actual.getName()); - assertEquals(expected.getMetadata(), actual.getMetadata()); - assertEquals(expected.getType(), actual.getType()); - } - - protected void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) { - assertTrue(expected instanceof InternalMultiBucketAggregation.InternalBucket); - assertTrue(actual instanceof ParsedMultiBucketAggregation.ParsedBucket); - - assertEquals(expected.getKey(), actual.getKey()); - assertEquals(expected.getKeyAsString(), actual.getKeyAsString()); - assertEquals(expected.getDocCount(), actual.getDocCount()); - - Aggregations expectedAggregations = expected.getAggregations(); - Aggregations actualAggregations = actual.getAggregations(); - assertEquals(expectedAggregations.asList().size(), actualAggregations.asList().size()); - - if (checkOrder) { - Iterator expectedIt = expectedAggregations.iterator(); - Iterator actualIt = actualAggregations.iterator(); - - while (expectedIt.hasNext()) { - Aggregation expectedAggregation = expectedIt.next(); - Aggregation actualAggregation = actualIt.next(); - assertMultiBucketsAggregations(expectedAggregation, actualAggregation, true); - } - } else { - for (Aggregation expectedAggregation : expectedAggregations) { - Aggregation actualAggregation = 
actualAggregations.get(expectedAggregation.getName()); - assertNotNull(actualAggregation); - assertMultiBucketsAggregations(expectedAggregation, actualAggregation, false); - } - } - } - @Override public void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsumerService.MultiBucketConsumer bucketConsumer) { /* diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index 92349206919ba..ac016a14d2fce 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -14,6 +14,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.monitor.fs.FsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.transport.RemoteConnectionStrategy; @@ -35,6 +36,12 @@ public final class InternalSettingsPlugin extends Plugin { Property.IndexScope, Property.NodeScope ); + public static final Setting USE_COMPOUND_FILE = Setting.boolSetting( + EngineConfig.USE_COMPOUND_FILE, + true, + Property.IndexScope, + Property.NodeScope + ); public static final Setting INDEX_CREATION_DATE_SETTING = Setting.longSetting( IndexMetadata.SETTING_CREATION_DATE, -1, @@ -54,6 +61,7 @@ public final class InternalSettingsPlugin extends Plugin { public List> getSettings() { return Arrays.asList( MERGE_ENABLED, + USE_COMPOUND_FILE, INDEX_CREATION_DATE_SETTING, PROVIDED_NAME_SETTING, TRANSLOG_RETENTION_CHECK_INTERVAL_SETTING, diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index 99eb44bcec5e6..387a9099c8199 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -246,7 +246,7 @@ public static Tuple randomShardInfo(Random random) { public static Tuple randomShardInfo(Random random, boolean withShardFailures) { int total = randomIntBetween(random, 1, 10); if (withShardFailures == false) { - return Tuple.tuple(new ShardInfo(total, total), new ShardInfo(total, total)); + return Tuple.tuple(ShardInfo.allSuccessful(total), ShardInfo.allSuccessful(total)); } int successful = randomIntBetween(random, 1, Math.max(1, (total - 1))); @@ -260,7 +260,7 @@ public static Tuple randomShardInfo(Random random, boolean actualFailures[i] = failure.v1(); expectedFailures[i] = failure.v2(); } - return Tuple.tuple(new ShardInfo(total, successful, actualFailures), new ShardInfo(total, successful, expectedFailures)); + return Tuple.tuple(ShardInfo.of(total, successful, actualFailures), ShardInfo.of(total, successful, expectedFailures)); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java index 766c9176c6846..70e3d1ddcdef1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpNodeClient.java @@ -13,9 +13,8 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; @@ -58,8 +57,7 @@ public void initialize( 
TaskManager taskManager, Supplier localNodeId, Transport.Connection localConnection, - RemoteClusterService remoteClusterService, - NamedWriteableRegistry namedWriteableRegistry + RemoteClusterService remoteClusterService ) { throw new UnsupportedOperationException("cannot initialize " + this.getClass().getSimpleName()); } @@ -81,7 +79,7 @@ public String getLocalNodeId() { } @Override - public Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) { + public RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) { return null; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index dc48868703bbb..99cf880a83604 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -14,8 +14,8 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -99,7 +99,7 @@ public class ElasticsearchAssertions { - public static void assertAcked(ActionRequestBuilder builder) { + public static void assertAcked(RequestBuilder builder) { assertAcked(builder, TimeValue.timeValueSeconds(30)); } @@ -107,7 +107,7 @@ public static void assertAcked(ActionFuture fu assertAcked(future.actionGet()); } - public static void assertAcked(ActionRequestBuilder builder, TimeValue timeValue) { + public 
static void assertAcked(RequestBuilder builder, TimeValue timeValue) { assertAcked(builder.get(timeValue)); } @@ -141,7 +141,7 @@ public static void assertAcked(CreateIndexResponse response) { * * @param builder the request builder */ - public static void assertBlocked(ActionRequestBuilder builder) { + public static void assertBlocked(RequestBuilder builder) { assertBlocked(builder, (ClusterBlock) null); } @@ -179,7 +179,7 @@ public static void assertBlocked(BaseBroadcastResponse replicatedBroadcastRespon * @param builder the request builder * @param expectedBlockId the expected block id */ - public static void assertBlocked(final ActionRequestBuilder builder, @Nullable final Integer expectedBlockId) { + public static void assertBlocked(final RequestBuilder builder, @Nullable final Integer expectedBlockId) { var e = ESTestCase.expectThrows(ClusterBlockException.class, builder); assertThat(e.blocks(), not(empty())); RestStatus status = checkRetryableBlock(e.blocks()) ? RestStatus.TOO_MANY_REQUESTS : RestStatus.FORBIDDEN; @@ -200,7 +200,7 @@ public static void assertBlocked(final ActionRequestBuilder builder, @Null * @param builder the request builder * @param expectedBlock the expected block */ - public static void assertBlocked(final ActionRequestBuilder builder, @Nullable final ClusterBlock expectedBlock) { + public static void assertBlocked(final RequestBuilder builder, @Nullable final ClusterBlock expectedBlock) { assertBlocked(builder, expectedBlock != null ? 
expectedBlock.id() : null); } @@ -340,12 +340,12 @@ public static void assertSearchHit(SearchResponse searchResponse, int number, Ma assertThat(searchResponse.getHits().getAt(number - 1), matcher); } - public static void assertNoFailures(ActionRequestBuilder searchRequestBuilder) { + public static void assertNoFailures(RequestBuilder searchRequestBuilder) { assertNoFailuresAndResponse(searchRequestBuilder, r -> {}); } public static void assertNoFailuresAndResponse( - ActionRequestBuilder searchRequestBuilder, + RequestBuilder searchRequestBuilder, Consumer consumer ) { assertResponse(searchRequestBuilder, res -> { @@ -366,7 +366,7 @@ public static void assertNoFailuresAndResponse(ActionFuture resp } public static void assertResponse( - ActionRequestBuilder searchRequestBuilder, + RequestBuilder searchRequestBuilder, Consumer consumer ) { var res = searchRequestBuilder.get(); @@ -430,7 +430,7 @@ public static void assertResponse(ActionFuture res } public static void assertCheckedResponse( - ActionRequestBuilder searchRequestBuilder, + RequestBuilder searchRequestBuilder, CheckedConsumer consumer ) throws IOException { var res = searchRequestBuilder.get(); @@ -692,7 +692,7 @@ public static T assertBooleanSubQuery(Query query, Class su * Run the request from a given builder and check that it throws an exception of the right type, with a given {@link RestStatus} */ public static void assertRequestBuilderThrows( - ActionRequestBuilder builder, + RequestBuilder builder, Class exceptionClass, RestStatus status ) { @@ -705,7 +705,7 @@ public static void assertRequestBuilderThrows( * @param extraInfo extra information to add to the failure message */ public static void assertRequestBuilderThrows( - ActionRequestBuilder builder, + RequestBuilder builder, Class exceptionClass, String extraInfo ) { @@ -767,11 +767,11 @@ public static void assertFutureThrows( } } - public static void assertRequestBuilderThrows(ActionRequestBuilder builder, RestStatus status) { + public static 
void assertRequestBuilderThrows(RequestBuilder builder, RestStatus status) { assertFutureThrows(builder.execute(), status); } - public static void assertRequestBuilderThrows(ActionRequestBuilder builder, RestStatus status, String extraInfo) { + public static void assertRequestBuilderThrows(RequestBuilder builder, RestStatus status, String extraInfo) { assertFutureThrows(builder.execute(), status, extraInfo); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 20cd1997fd70e..a3427b3778b0a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -32,8 +32,9 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -59,6 +60,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.FeatureSpecification; @@ -71,6 +73,7 @@ import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import 
org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -84,6 +87,7 @@ import org.junit.Before; import java.io.BufferedReader; +import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -127,7 +131,6 @@ import static org.elasticsearch.client.RestClient.IGNORE_RESPONSE_CODES_PARAM; import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.test.rest.TestFeatureService.ALL_FEATURES; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -228,7 +231,22 @@ public enum ProductFeature { private static EnumSet availableFeatures; private static Set nodesVersions; - private static TestFeatureService testFeatureService = ALL_FEATURES; + + private static final TestFeatureService ALL_FEATURES = new TestFeatureService() { + @Override + public boolean clusterHasFeature(String featureId) { + return true; + } + + @Override + public Set getAllSupportedFeatures() { + throw new UnsupportedOperationException( + "Only available to properly initialized TestFeatureService. 
See ESRestTestCase#createTestFeatureService" + ); + } + }; + + protected static TestFeatureService testFeatureService = ALL_FEATURES; protected static Set getCachedNodesVersions() { assert nodesVersions != null; @@ -256,6 +274,10 @@ protected static boolean clusterHasFeature(NodeFeature feature) { return testFeatureService.clusterHasFeature(feature.id()); } + protected static boolean testFeatureServiceInitialized() { + return testFeatureService != ALL_FEATURES; + } + @Before public void initClient() throws IOException { if (client == null) { @@ -263,7 +285,7 @@ public void initClient() throws IOException { assert clusterHosts == null; assert availableFeatures == null; assert nodesVersions == null; - assert testFeatureService == ALL_FEATURES; + assert testFeatureServiceInitialized() == false; clusterHosts = parseClusterHosts(getTestRestCluster()); logger.info("initializing REST clients against {}", clusterHosts); client = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); @@ -325,7 +347,7 @@ public void initClient() throws IOException { testFeatureService = createTestFeatureService(getClusterStateFeatures(adminClient), semanticNodeVersions); } - assert testFeatureService != ALL_FEATURES; + assert testFeatureServiceInitialized(); assert client != null; assert adminClient != null; assert clusterHosts != null; @@ -333,19 +355,28 @@ public void initClient() throws IOException { assert nodesVersions != null; } - protected static TestFeatureService createTestFeatureService( + protected TestFeatureService createTestFeatureService( Map> clusterStateFeatures, Set semanticNodeVersions ) { // Historical features information is unavailable when using legacy test plugins boolean hasHistoricalFeaturesInformation = System.getProperty("tests.features.metadata.path") != null; - var providers = hasHistoricalFeaturesInformation - ? 
List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures()) - : List.of(new RestTestLegacyFeatures()); + + final List featureSpecifications; + if (hasHistoricalFeaturesInformation) { + featureSpecifications = List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures()); + } else { + logger.warn( + "This test is running on the legacy test framework; historical features from production code will not be available. " + + "You need to port the test to the new test plugins in order to use historical features from production code. " + + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as {}.", + RestTestLegacyFeatures.class.getCanonicalName() + ); + featureSpecifications = List.of(new RestTestLegacyFeatures()); + } return new ESRestTestFeatureService( - hasHistoricalFeaturesInformation, - providers, + featureSpecifications, semanticNodeVersions, ClusterFeatures.calculateAllNodeFeatures(clusterStateFeatures.values()) ); @@ -1254,15 +1285,33 @@ protected void refreshAllIndices() throws IOException { client().performRequest(refreshRequest); } - protected static RefreshResponse refresh(String index) throws IOException { + protected static BroadcastResponse refresh(String index) throws IOException { return refresh(client(), index); } - protected static RefreshResponse refresh(RestClient client, String index) throws IOException { + private static final ConstructingObjectParser BROADCAST_RESPONSE_PARSER = new ConstructingObjectParser<>( + "broadcast_response", + true, + arg -> { + BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; + return new BroadcastResponse( + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()) + ); + } + ); + + static { + BaseBroadcastResponse.declareBroadcastFields(BROADCAST_RESPONSE_PARSER); + } + + protected static BroadcastResponse refresh(RestClient client, String 
index) throws IOException { Request refreshRequest = new Request("POST", "/" + index + "/_refresh"); Response response = client.performRequest(refreshRequest); try (var parser = responseAsParser(response)) { - return RefreshResponse.fromXContent(parser); + return BROADCAST_RESPONSE_PARSER.apply(parser, null); } } @@ -2343,6 +2392,7 @@ private static class ESRestTestCaseHistoricalFeatures implements FeatureSpecific private static Map historicalFeatures; @Override + @SuppressForbidden(reason = "File#pathSeparator has not equivalent in java.nio.file") public Map getHistoricalFeatures() { if (historicalFeatures == null) { Map historicalFeaturesMap = new HashMap<>(); @@ -2353,7 +2403,7 @@ public Map getHistoricalFeatures() { ); } - String[] metadataFiles = metadataPath.split(System.getProperty("path.separator")); + String[] metadataFiles = metadataPath.split(File.pathSeparator); for (String metadataFile : metadataFiles) { try ( InputStream in = Files.newInputStream(PathUtils.get(metadataFile)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index 5bb22058e4688..c8647f4e9c43b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -9,11 +9,12 @@ package org.elasticsearch.test.rest; import org.elasticsearch.Version; -import org.elasticsearch.core.Strings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureData; import org.elasticsearch.features.FeatureSpecification; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.NavigableMap; import java.util.Set; @@ -22,35 +23,26 @@ class ESRestTestFeatureService implements TestFeatureService { private final Predicate historicalFeaturesPredicate; private final Set 
clusterStateFeatures; + private final Set allSupportedFeatures; ESRestTestFeatureService( - boolean hasHistoricalFeaturesInformation, List specs, Collection nodeVersions, Set clusterStateFeatures ) { - var minNodeVersion = nodeVersions.stream().min(Version::compareTo); + var minNodeVersion = nodeVersions.stream().min(Comparator.naturalOrder()); var featureData = FeatureData.createFromSpecifications(specs); var historicalFeatures = featureData.getHistoricalFeatures(); - var allHistoricalFeatures = historicalFeatures.lastEntry() == null ? Set.of() : historicalFeatures.lastEntry().getValue(); + Set allHistoricalFeatures = historicalFeatures.lastEntry() == null ? Set.of() : historicalFeatures.lastEntry().getValue(); - var errorMessage = Strings.format( - hasHistoricalFeaturesInformation - ? "Check the feature has been added to the correct FeatureSpecification in the relevant module or, if this is a " - + "legacy feature used only in tests, to a test-only FeatureSpecification such as %s." - : "This test is running on the legacy test framework; historical features from production code will not be available. " - + "You need to port the test to the new test plugins in order to use historical features from production code. 
" - + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as %s.", - RestTestLegacyFeatures.class.getCanonicalName() - ); - this.historicalFeaturesPredicate = minNodeVersion.>map(v -> featureId -> { - assert allHistoricalFeatures.contains(featureId) : Strings.format("Unknown historical feature %s: %s", featureId, errorMessage); - return hasHistoricalFeature(historicalFeatures, v, featureId); - }).orElse(featureId -> { - // We can safely assume that new non-semantic versions (serverless) support all historical features - assert allHistoricalFeatures.contains(featureId) : Strings.format("Unknown historical feature %s: %s", featureId, errorMessage); - return true; - }); + this.allSupportedFeatures = Sets.union(clusterStateFeatures, minNodeVersion.>map(v -> { + var historicalFeaturesForVersion = historicalFeatures.floorEntry(v); + return historicalFeaturesForVersion == null ? Set.of() : historicalFeaturesForVersion.getValue(); + }).orElse(allHistoricalFeatures)); + + this.historicalFeaturesPredicate = minNodeVersion.>map( + v -> featureId -> hasHistoricalFeature(historicalFeatures, v, featureId) + ).orElse(featureId -> true); // We can safely assume that new non-semantic versions (serverless) support all historical features this.clusterStateFeatures = clusterStateFeatures; } @@ -59,10 +51,16 @@ private static boolean hasHistoricalFeature(NavigableMap> h return features != null && features.getValue().contains(featureId); } + @Override public boolean clusterHasFeature(String featureId) { if (clusterStateFeatures.contains(featureId)) { return true; } return historicalFeaturesPredicate.test(featureId); } + + @Override + public Set getAllSupportedFeatures() { + return allSupportedFeatures; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index fcd2f781ec58d..88232bd7fd16c 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -57,10 +57,10 @@ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); // QA - rolling upgrade tests + public static final NodeFeature DESIRED_NODE_API_SUPPORTED = new NodeFeature("desired_node_supported"); public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); @UpdateForV9 - public static final NodeFeature WATCHES_VERSION_IN_META = new NodeFeature("watcher.version_in_meta"); @UpdateForV9 public static final NodeFeature SECURITY_ROLE_DESCRIPTORS_OPTIONAL = new NodeFeature("security.role_descriptors_optional"); @@ -76,6 +76,53 @@ public class RestTestLegacyFeatures implements FeatureSpecification { @UpdateForV9 public static final NodeFeature ML_ANALYTICS_MAPPINGS = new NodeFeature("ml.analytics_mappings"); + public static final NodeFeature TSDB_NEW_INDEX_FORMAT = new NodeFeature("indices.tsdb_new_format"); + public static final NodeFeature TSDB_GENERALLY_AVAILABLE = new NodeFeature("indices.tsdb_supported"); + + /* + * A composable index template with no template defined in the body is mistakenly always assumed to not be a time series template. 
+ * Fixed in #98840 + */ + public static final NodeFeature TSDB_EMPTY_TEMPLATE_FIXED = new NodeFeature("indices.tsdb_empty_composable_template_fixed"); + public static final NodeFeature SYNTHETIC_SOURCE_SUPPORTED = new NodeFeature("indices.synthetic_source"); + + public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_SUPPORTED = new NodeFeature("allocator.desired_balance"); + + /* + * Cancel shard allocation command is broken for initial desired balance versions + * and might allocate shard on the node where it is not supposed to be. This + * is fixed by https://github.com/elastic/elasticsearch/pull/93635. + */ + public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_FIXED = new NodeFeature("allocator.desired_balance_fixed"); + public static final NodeFeature INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED = new NodeFeature("settings.indexing_slowlog_level_removed"); + public static final NodeFeature DEPRECATION_WARNINGS_LEAK_FIXED = new NodeFeature("deprecation_warnings_leak_fixed"); + + // QA - Full cluster restart + @UpdateForV9 + public static final NodeFeature REPLICATION_OF_CLOSED_INDICES = new NodeFeature("indices.closed_replication_supported"); + @UpdateForV9 + public static final NodeFeature TASK_INDEX_SYSTEM_INDEX = new NodeFeature("tasks.moved_to_system_index"); + @UpdateForV9 + public static final NodeFeature SOFT_DELETES_ENFORCED = new NodeFeature("indices.soft_deletes_enforced"); + @UpdateForV9 + public static final NodeFeature NEW_TRANSPORT_COMPRESSED_SETTING = new NodeFeature("transport.new_compressed_setting"); + @UpdateForV9 + public static final NodeFeature SHUTDOWN_SUPPORTED = new NodeFeature("shutdown.supported"); + @UpdateForV9 + public static final NodeFeature SERVICE_ACCOUNTS_SUPPORTED = new NodeFeature("auth.service_accounts_supported"); + @UpdateForV9 + public static final NodeFeature TRANSFORM_SUPPORTED = new NodeFeature("transform.supported"); + @UpdateForV9 + public static final NodeFeature SLM_SUPPORTED = new 
NodeFeature("slm.supported"); + @UpdateForV9 + public static final NodeFeature DATA_STREAMS_SUPPORTED = new NodeFeature("data_stream.supported"); + @UpdateForV9 + public static final NodeFeature NEW_DATA_STREAMS_INDEX_NAME_FORMAT = new NodeFeature("data_stream.new_index_name_format"); + @UpdateForV9 + public static final NodeFeature DISABLE_FIELD_NAMES_FIELD_REMOVED = new NodeFeature("disable_of_field_names_field_removed"); + @UpdateForV9 + public static final NodeFeature ML_NLP_SUPPORTED = new NodeFeature("ml.nlp_supported"); + // YAML public static final NodeFeature REST_ELASTIC_PRODUCT_HEADER_PRESENT = new NodeFeature("action.rest.product_header_present"); @@ -103,7 +150,28 @@ public Map getHistoricalFeatures() { entry(TRANSFORM_NEW_API_ENDPOINT, Version.V_7_5_0), entry(ML_INDICES_HIDDEN, Version.V_7_7_0), entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0), - entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1) + entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1), + entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0), + entry(TSDB_NEW_INDEX_FORMAT, Version.V_8_2_0), + entry(SYNTHETIC_SOURCE_SUPPORTED, Version.V_8_4_0), + entry(DESIRED_BALANCED_ALLOCATOR_SUPPORTED, Version.V_8_6_0), + entry(DESIRED_BALANCED_ALLOCATOR_FIXED, Version.V_8_7_1), + entry(TSDB_GENERALLY_AVAILABLE, Version.V_8_7_0), + entry(TSDB_EMPTY_TEMPLATE_FIXED, Version.V_8_11_0), + entry(INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED, Version.V_8_0_0), + entry(DEPRECATION_WARNINGS_LEAK_FIXED, Version.V_7_17_9), + entry(REPLICATION_OF_CLOSED_INDICES, Version.V_7_2_0), + entry(TASK_INDEX_SYSTEM_INDEX, Version.V_7_10_0), + entry(SOFT_DELETES_ENFORCED, Version.V_8_0_0), + entry(NEW_TRANSPORT_COMPRESSED_SETTING, Version.V_7_14_0), + entry(SHUTDOWN_SUPPORTED, Version.V_7_15_0), + entry(SERVICE_ACCOUNTS_SUPPORTED, Version.V_7_13_0), + entry(TRANSFORM_SUPPORTED, Version.V_7_2_0), + entry(SLM_SUPPORTED, Version.V_7_4_0), + entry(DATA_STREAMS_SUPPORTED, Version.V_7_9_0), + 
entry(NEW_DATA_STREAMS_INDEX_NAME_FORMAT, Version.V_7_11_0), + entry(DISABLE_FIELD_NAMES_FIELD_REMOVED, Version.V_8_0_0), + entry(ML_NLP_SUPPORTED, Version.V_8_0_0) ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java index 9de1fcf631520..332a00ce895a0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java @@ -8,8 +8,10 @@ package org.elasticsearch.test.rest; +import java.util.Set; + public interface TestFeatureService { boolean clusterHasFeature(String featureId); - TestFeatureService ALL_FEATURES = ignored -> true; + Set getAllSupportedFeatures(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java index 194d7ddb798ca..73ee7ccae9ca0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java @@ -101,8 +101,6 @@ public void handleResponse(final long reque ); } catch (IOException | UnsupportedOperationException e) { throw new AssertionError("failed to serialize/deserialize response " + response, e); - } finally { - response.decRef(); } try { transportResponseHandler.handleResponse(deliveredResponse); diff --git a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java index e8a853989e8e5..ce8e3a2574f3e 100644 --- a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java +++ b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import 
org.elasticsearch.core.Releasable; import org.elasticsearch.node.Node; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; @@ -30,7 +31,7 @@ public TestThreadPool(String name, ExecutorBuilder... customBuilders) { } public TestThreadPool(String name, Settings settings, ExecutorBuilder... customBuilders) { - super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), customBuilders); + super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), MeterRegistry.NOOP, customBuilders); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index b835a56a6384c..86ed912741796 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -340,7 +340,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -371,7 +371,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -423,7 +423,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.executor(executor); } @@ -497,7 +497,7 @@ public void handleException(TransportException exp) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } } @@ -675,7 +675,7 @@ public 
TransportResponse.Empty read(StreamInput in) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -732,7 +732,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -798,7 +798,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -862,7 +862,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -1120,7 +1120,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -1187,7 +1187,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -1226,7 +1226,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -1281,7 +1281,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -1649,7 +1649,7 @@ public Version0Response read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -1697,7 +1697,7 @@ public 
Version1Response read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -1750,7 +1750,7 @@ public Version1Response read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -1801,7 +1801,7 @@ public Version0Response read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -1846,7 +1846,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -1903,7 +1903,7 @@ public StringMessageResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.generic(); } @@ -1949,7 +1949,7 @@ public TestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -2006,7 +2006,7 @@ public TestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -2041,7 +2041,7 @@ public TestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -2213,7 +2213,7 @@ public void handleException(TransportException exp) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return executor; } } 
@@ -2276,7 +2276,7 @@ public void handleException(TransportException exp) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.executor(executor); } } @@ -2585,7 +2585,7 @@ public void handleException(TransportException exp) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.executor(executor); } }; @@ -2638,7 +2638,7 @@ public void handleException(TransportException exp) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return threadPool.executor(randomFrom(executors)); } }; @@ -2696,7 +2696,7 @@ protected void doRun() throws Exception { CountDownLatch responseLatch = new CountDownLatch(1); TransportResponseHandler transportResponseHandler = new TransportResponseHandler.Empty() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -2768,7 +2768,7 @@ protected void doRun() throws Exception { CountDownLatch responseLatch = new CountDownLatch(1); TransportResponseHandler transportResponseHandler = new TransportResponseHandler.Empty() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -2886,7 +2886,7 @@ protected void doRun() throws Exception { AtomicReference receivedException = new AtomicReference<>(null); TransportResponseHandler transportResponseHandler = new TransportResponseHandler.Empty() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -3461,7 +3461,7 @@ public void onFailure(final Exception e) { TransportRequestOptions.EMPTY, new TransportResponseHandler.Empty() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return 
TransportResponseHandler.TRANSPORT_WORKER; } @@ -3550,7 +3550,7 @@ public static Future submitRequest( final TransportResponseHandler futureHandler = new ActionListenerResponseHandler<>( responseListener, handler, - handler.executor(transportService.threadPool) + handler.executor() ); responseListener.addListener(ActionListener.wrap(handler::handleResponse, e -> handler.handleException((TransportException) e))); final PlainActionFuture future = new PlainActionFuture<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java index 889b7cdab4629..5967324a544c5 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.transport.MockTransport; @@ -263,24 +264,28 @@ public String getProfileName() { @Override public void sendResponse(final TransportResponse response) { + response.mustIncRef(); + final var releasable = Releasables.assertOnce(response::decRef); execute(new RebootSensitiveRunnable() { @Override public void ifRebooted() { - response.decRef(); - cleanupResponseHandler(requestId); + try (releasable) { + cleanupResponseHandler(requestId); + } } @Override public void run() { - final ConnectionStatus connectionStatus = destinationTransport.getConnectionStatus(getLocalNode()); - switch (connectionStatus) { - case CONNECTED, BLACK_HOLE_REQUESTS_ONLY -> handleResponse(requestId, response); - case BLACK_HOLE, DISCONNECTED -> { - response.decRef(); - 
logger.trace("delaying response to {}: channel is {}", requestDescription, connectionStatus); - onBlackholedDuringSend(requestId, action, destinationTransport); + try (releasable) { + final ConnectionStatus connectionStatus = destinationTransport.getConnectionStatus(getLocalNode()); + switch (connectionStatus) { + case CONNECTED, BLACK_HOLE_REQUESTS_ONLY -> handleResponse(requestId, response); + case BLACK_HOLE, DISCONNECTED -> { + logger.trace("delaying response to {}: channel is {}", requestDescription, connectionStatus); + onBlackholedDuringSend(requestId, action, destinationTransport); + } + default -> throw new AssertionError("unexpected status: " + connectionStatus); } - default -> throw new AssertionError("unexpected status: " + connectionStatus); } } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/DisruptableMockTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/DisruptableMockTransportTests.java index 6d5542db5e529..9582d28327122 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/DisruptableMockTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/DisruptableMockTransportTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableRef; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -229,7 +230,9 @@ private TransportRequestHandler requestHandlerShouldNotBeCalled() { private TransportRequestHandler requestHandlerRepliesNormally() { return (request, channel, task) -> { logger.debug("got a dummy request, replying normally..."); - channel.sendResponse(new TestResponse()); + try (var responseRef = ReleasableRef.of(new TestResponse())) { + channel.sendResponse(responseRef.get()); + } }; } @@ -255,7 +258,7 @@ public T read(StreamInput in) { } 
@Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -279,7 +282,7 @@ public TestResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -305,7 +308,7 @@ public T read(StreamInput in) { } @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -415,7 +418,9 @@ public void testDisconnectedOnSuccessfulResponse() throws IOException { assertNull(responseHandlerException.get()); disconnectedLinks.add(Tuple.tuple(node2, node1)); - responseHandlerChannel.get().sendResponse(new TestResponse()); + try (var responseRef = ReleasableRef.of(new TestResponse())) { + responseHandlerChannel.get().sendResponse(responseRef.get()); + } deterministicTaskQueue.runAllTasks(); deliverBlackholedRequests.run(); deterministicTaskQueue.runAllTasks(); @@ -453,7 +458,9 @@ public void testUnavailableOnSuccessfulResponse() throws IOException { assertNotNull(responseHandlerChannel.get()); blackholedLinks.add(Tuple.tuple(node2, node1)); - responseHandlerChannel.get().sendResponse(new TestResponse()); + try (var responseRef = ReleasableRef.of(new TestResponse())) { + responseHandlerChannel.get().sendResponse(responseRef.get()); + } deterministicTaskQueue.runAllRunnableTasks(); } @@ -485,7 +492,9 @@ public void testUnavailableOnRequestOnlyReceivesSuccessfulResponse() throws IOEx blackholedRequestLinks.add(Tuple.tuple(node1, node2)); blackholedRequestLinks.add(Tuple.tuple(node2, node1)); - responseHandlerChannel.get().sendResponse(new TestResponse()); + try (var responseRef = ReleasableRef.of(new TestResponse())) { + responseHandlerChannel.get().sendResponse(responseRef.get()); + } deterministicTaskQueue.runAllRunnableTasks(); assertTrue(responseHandlerCalled.get()); diff 
--git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java index aa3982e076ae0..fc45325008ba4 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultEnvironmentProvider.java @@ -20,7 +20,6 @@ public class DefaultEnvironmentProvider implements EnvironmentProvider { private static final String HOSTNAME_OVERRIDE = "LinuxDarwinHostname"; private static final String COMPUTERNAME_OVERRIDE = "WindowsComputername"; - private static final String TESTS_RUNTIME_JAVA_SYSPROP = "tests.runtime.java"; @Override public Map get(LocalNodeSpec nodeSpec) { @@ -28,7 +27,7 @@ public Map get(LocalNodeSpec nodeSpec) { // If we are testing the current version of Elasticsearch, use the configured runtime Java, otherwise use the bundled JDK if (nodeSpec.getDistributionType() == DistributionType.INTEG_TEST || nodeSpec.getVersion().equals(Version.CURRENT)) { - environment.put("ES_JAVA_HOME", System.getProperty(TESTS_RUNTIME_JAVA_SYSPROP)); + environment.put("ES_JAVA_HOME", System.getProperty("java.home")); } // Override the system hostname variables for testing diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java index 473456f6b0cc3..3341b20a89d3c 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java @@ -33,6 +33,7 @@ public void apply(LocalClusterSpecBuilder builder) { .setting("xpack.security.fips_mode.enabled", "true") 
.setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.authc.password_hashing.algorithm", "pbkdf2_stretch") + .setting("xpack.security.fips_mode.required_providers", () -> "[BCFIPS, BCJSSE]", n -> n.getVersion().onOrAfter("8.13.0")) .keystorePassword("keystore-password"); } } diff --git a/test/test-clusters/src/main/resources/fips/fips_java.policy b/test/test-clusters/src/main/resources/fips/fips_java.policy index bbfc1caf7593a..c259b0bc908d8 100644 --- a/test/test-clusters/src/main/resources/fips/fips_java.policy +++ b/test/test-clusters/src/main/resources/fips/fips_java.policy @@ -18,3 +18,8 @@ grant { permission org.bouncycastle.crypto.CryptoServicesPermission "exportPrivateKey"; permission java.io.FilePermission "${javax.net.ssl.trustStore}", "read"; }; + +// rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { + permission java.net.SocketPermission "*", "connect"; +}; diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 049102f87a544..4be9481df58b1 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -446,20 +446,10 @@ public void test() throws IOException { } // skip test if the whole suite (yaml file) is disabled - assumeFalse( - testCandidate.getSetupSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()), - testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext) - ); - // skip test if the whole suite (yaml file) is disabled - assumeFalse( - testCandidate.getTeardownSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()), - 
testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext) - ); + testCandidate.getSetupSection().getPrerequisiteSection().evaluate(restTestExecutionContext, testCandidate.getSuitePath()); + testCandidate.getTeardownSection().getPrerequisiteSection().evaluate(restTestExecutionContext, testCandidate.getSuitePath()); // skip test if test section is disabled - assumeFalse( - testCandidate.getTestSection().getSkipSection().getSkipMessage(testCandidate.getTestPath()), - testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext) - ); + testCandidate.getTestSection().getPrerequisiteSection().evaluate(restTestExecutionContext, testCandidate.getTestPath()); // let's check that there is something to run, otherwise there might be a problem with the test section if (testCandidate.getTestSection().getExecutableSections().isEmpty()) { @@ -468,11 +458,11 @@ public void test() throws IOException { assumeFalse( "[" + testCandidate.getTestPath() + "] skipped, reason: in fips 140 mode", - inFipsJvm() && testCandidate.getTestSection().getSkipSection().yamlRunnerHasFeature("fips_140") + inFipsJvm() && testCandidate.getTestSection().getPrerequisiteSection().hasYamlRunnerFeature("fips_140") ); final Settings globalTemplateSettings = getGlobalTemplateSettings( - testCandidate.getTestSection().getSkipSection().yamlRunnerHasFeature("default_shards") + testCandidate.getTestSection().getPrerequisiteSection().hasYamlRunnerFeature("default_shards") ); if (globalTemplateSettings.isEmpty() == false && ESRestTestCase.has(ProductFeature.LEGACY_TEMPLATES)) { diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java index e152f626b8541..58c1e3b82e336 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java +++ 
b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java @@ -36,6 +36,19 @@ public String getApi() { return api; } + public ApiCallSection copyWithNewApi(String api) { + ApiCallSection copy = new ApiCallSection(api); + for (var e : params.entrySet()) { + copy.addParam(e.getKey(), e.getValue()); + } + copy.addHeaders(headers); + for (var b : bodies) { + copy.addBody(b); + } + copy.nodeSelector = nodeSelector; + return copy; + } + public Map getParams() { // make sure we never modify the parameters once returned return unmodifiableMap(params); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSection.java index 740befe2f3a6a..f679a725c4feb 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSection.java @@ -28,7 +28,7 @@ public static ClientYamlTestSection parse(XContentParser parser) throws IOExcept List executableSections = new ArrayList<>(); try { parser.nextToken(); - SkipSection skipSection = SkipSection.parseIfNext(parser); + PrerequisiteSection prerequisiteSection = PrerequisiteSection.parseIfNext(parser); while (parser.currentToken() != XContentParser.Token.END_ARRAY) { ParserUtils.advanceToFieldName(parser); executableSections.add(ExecutableSection.parse(parser)); @@ -45,7 +45,7 @@ public static ClientYamlTestSection parse(XContentParser parser) throws IOExcept ); } parser.nextToken(); - return new ClientYamlTestSection(sectionLocation, sectionName, skipSection, executableSections); + return new ClientYamlTestSection(sectionLocation, sectionName, prerequisiteSection, executableSections); } catch (Exception e) { throw new ParsingException(parser.getTokenLocation(), "Error parsing test named [" + 
sectionName + "]", e); } @@ -53,18 +53,18 @@ public static ClientYamlTestSection parse(XContentParser parser) throws IOExcept private final XContentLocation location; private final String name; - private final SkipSection skipSection; + private final PrerequisiteSection prerequisiteSection; private final List executableSections; public ClientYamlTestSection( XContentLocation location, String name, - SkipSection skipSection, + PrerequisiteSection prerequisiteSection, List executableSections ) { this.location = location; this.name = name; - this.skipSection = Objects.requireNonNull(skipSection, "skip section cannot be null"); + this.prerequisiteSection = Objects.requireNonNull(prerequisiteSection, "skip section cannot be null"); this.executableSections = Collections.unmodifiableList(executableSections); } @@ -76,8 +76,8 @@ public String getName() { return name; } - public SkipSection getSkipSection() { - return skipSection; + public PrerequisiteSection getPrerequisiteSection() { + return prerequisiteSection; } public List getExecutableSections() { diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java index 48f24d3a935af..65a23bd376212 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java @@ -175,7 +175,7 @@ private static Stream validateExecutableSections( .filter(section -> section instanceof DoSection) .map(section -> (DoSection) section) .filter(section -> false == section.getExpectedWarningHeaders().isEmpty()) - .filter(section -> false == hasSkipFeature("warnings", testSection, setupSection, teardownSection)) + .filter(section -> false == hasYamlRunnerFeature("warnings", testSection, setupSection, teardownSection)) .map(section -> 
String.format(Locale.ROOT, """ attempted to add a [do] with a [warnings] section without a corresponding ["skip": "features": "warnings"] \ so runners that do not support the [warnings] section can skip the test at line [%d]\ @@ -187,7 +187,7 @@ private static Stream validateExecutableSections( .filter(section -> section instanceof DoSection) .map(section -> (DoSection) section) .filter(section -> false == section.getExpectedWarningHeadersRegex().isEmpty()) - .filter(section -> false == hasSkipFeature("warnings_regex", testSection, setupSection, teardownSection)) + .filter(section -> false == hasYamlRunnerFeature("warnings_regex", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add a [do] with a [warnings_regex] section without a corresponding \ ["skip": "features": "warnings_regex"] so runners that do not support the [warnings_regex] \ @@ -201,7 +201,7 @@ private static Stream validateExecutableSections( .filter(section -> section instanceof DoSection) .map(section -> (DoSection) section) .filter(section -> false == section.getAllowedWarningHeaders().isEmpty()) - .filter(section -> false == hasSkipFeature("allowed_warnings", testSection, setupSection, teardownSection)) + .filter(section -> false == hasYamlRunnerFeature("allowed_warnings", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add a [do] with a [allowed_warnings] section without a corresponding \ ["skip": "features": "allowed_warnings"] so runners that do not support the [allowed_warnings] \ @@ -215,7 +215,7 @@ private static Stream validateExecutableSections( .filter(section -> section instanceof DoSection) .map(section -> (DoSection) section) .filter(section -> false == section.getAllowedWarningHeadersRegex().isEmpty()) - .filter(section -> false == hasSkipFeature("allowed_warnings_regex", testSection, setupSection, teardownSection)) + .filter(section -> false == 
hasYamlRunnerFeature("allowed_warnings_regex", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add a [do] with a [allowed_warnings_regex] section without a corresponding \ ["skip": "features": "allowed_warnings_regex"] so runners that do not support the [allowed_warnings_regex] \ @@ -229,7 +229,7 @@ private static Stream validateExecutableSections( .filter(section -> section instanceof DoSection) .map(section -> (DoSection) section) .filter(section -> NodeSelector.ANY != section.getApiCallSection().getNodeSelector()) - .filter(section -> false == hasSkipFeature("node_selector", testSection, setupSection, teardownSection)) + .filter(section -> false == hasYamlRunnerFeature("node_selector", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add a [do] with a [node_selector] section without a corresponding \ ["skip": "features": "node_selector"] so runners that do not support the [node_selector] section \ @@ -241,7 +241,7 @@ private static Stream validateExecutableSections( errors, sections.stream() .filter(section -> section instanceof ContainsAssertion) - .filter(section -> false == hasSkipFeature("contains", testSection, setupSection, teardownSection)) + .filter(section -> false == hasYamlRunnerFeature("contains", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add a [contains] assertion without a corresponding ["skip": "features": "contains"] \ so runners that do not support the [contains] assertion can skip the test at line [%d]\ @@ -254,7 +254,7 @@ private static Stream validateExecutableSections( .filter(section -> section instanceof DoSection) .map(section -> (DoSection) section) .filter(section -> false == section.getApiCallSection().getHeaders().isEmpty()) - .filter(section -> false == hasSkipFeature("headers", testSection, setupSection, teardownSection)) + .filter(section -> false == 
hasYamlRunnerFeature("headers", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add a [do] with a [headers] section without a corresponding ["skip": "features": "headers"] \ so runners that do not support the [headers] section can skip the test at line [%d]\ @@ -265,7 +265,7 @@ private static Stream validateExecutableSections( errors, sections.stream() .filter(section -> section instanceof CloseToAssertion) - .filter(section -> false == hasSkipFeature("close_to", testSection, setupSection, teardownSection)) + .filter(section -> false == hasYamlRunnerFeature("close_to", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add a [close_to] assertion without a corresponding ["skip": "features": "close_to"] \ so runners that do not support the [close_to] assertion can skip the test at line [%d]\ @@ -276,7 +276,7 @@ private static Stream validateExecutableSections( errors, sections.stream() .filter(section -> section instanceof IsAfterAssertion) - .filter(section -> false == hasSkipFeature("is_after", testSection, setupSection, teardownSection)) + .filter(section -> false == hasYamlRunnerFeature("is_after", testSection, setupSection, teardownSection)) .map(section -> String.format(Locale.ROOT, """ attempted to add an [is_after] assertion without a corresponding ["skip": "features": "is_after"] \ so runners that do not support the [is_after] assertion can skip the test at line [%d]\ @@ -286,19 +286,19 @@ private static Stream validateExecutableSections( return errors; } - private static boolean hasSkipFeature( + private static boolean hasYamlRunnerFeature( String feature, ClientYamlTestSection testSection, SetupSection setupSection, TeardownSection teardownSection ) { - return (testSection != null && hasSkipFeature(feature, testSection.getSkipSection())) - || (setupSection != null && hasSkipFeature(feature, setupSection.getSkipSection())) - || (teardownSection != 
null && hasSkipFeature(feature, teardownSection.getSkipSection())); + return (testSection != null && hasYamlRunnerFeature(feature, testSection.getPrerequisiteSection())) + || (setupSection != null && hasYamlRunnerFeature(feature, setupSection.getPrerequisiteSection())) + || (teardownSection != null && hasYamlRunnerFeature(feature, teardownSection.getPrerequisiteSection())); } - private static boolean hasSkipFeature(String feature, SkipSection skipSection) { - return skipSection != null && skipSection.yamlRunnerHasFeature(feature); + private static boolean hasYamlRunnerFeature(String feature, PrerequisiteSection prerequisiteSection) { + return prerequisiteSection != null && prerequisiteSection.hasYamlRunnerFeature(feature); } public List getTestSections() { diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index b1a9142596d67..00b92eac40d7f 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -266,7 +266,7 @@ public ApiCallSection getApiCallSection() { return apiCallSection; } - void setApiCallSection(ApiCallSection apiCallSection) { + public void setApiCallSection(ApiCallSection apiCallSection) { this.apiCallSection = apiCallSection; } @@ -275,7 +275,7 @@ void setApiCallSection(ApiCallSection apiCallSection) { * If the headers don't match exactly this request is considered to have failed. * Defaults to emptyList. */ - List getExpectedWarningHeaders() { + public List getExpectedWarningHeaders() { return expectedWarningHeaders; } @@ -284,7 +284,7 @@ List getExpectedWarningHeaders() { * If the headers don't match this request is considered to have failed. * Defaults to emptyList. 
*/ - List getExpectedWarningHeadersRegex() { + public List getExpectedWarningHeadersRegex() { return expectedWarningHeadersRegex; } @@ -292,7 +292,7 @@ List getExpectedWarningHeadersRegex() { * Set the warning headers that we expect from this response. If the headers don't match exactly this request is considered to have * failed. Defaults to emptyList. */ - void setExpectedWarningHeaders(List expectedWarningHeaders) { + public void setExpectedWarningHeaders(List expectedWarningHeaders) { this.expectedWarningHeaders = expectedWarningHeaders; } @@ -300,7 +300,7 @@ void setExpectedWarningHeaders(List expectedWarningHeaders) { * Set the warning headers patterns that we expect from this response. If the headers don't match this request is considered to have * failed. Defaults to emptyList. */ - void setExpectedWarningHeadersRegex(List expectedWarningHeadersRegex) { + public void setExpectedWarningHeadersRegex(List expectedWarningHeadersRegex) { this.expectedWarningHeadersRegex = expectedWarningHeadersRegex; } @@ -308,7 +308,7 @@ void setExpectedWarningHeadersRegex(List expectedWarningHeadersRegex) { * Warning headers that we allow from this response. These warning * headers don't cause the test to fail. Defaults to emptyList. */ - List getAllowedWarningHeaders() { + public List getAllowedWarningHeaders() { return allowedWarningHeaders; } @@ -316,7 +316,7 @@ List getAllowedWarningHeaders() { * Warning headers that we allow from this response. These warning * headers don't cause the test to fail. Defaults to emptyList. */ - List getAllowedWarningHeadersRegex() { + public List getAllowedWarningHeadersRegex() { return allowedWarningHeadersRegex; } @@ -324,7 +324,7 @@ List getAllowedWarningHeadersRegex() { * Set the warning headers that we expect from this response. These * warning headers don't cause the test to fail. Defaults to emptyList. 
*/ - void setAllowedWarningHeaders(List allowedWarningHeaders) { + public void setAllowedWarningHeaders(List allowedWarningHeaders) { this.allowedWarningHeaders = allowedWarningHeaders; } @@ -332,7 +332,7 @@ void setAllowedWarningHeaders(List allowedWarningHeaders) { * Set the warning headers pattern that we expect from this response. These * warning headers don't cause the test to fail. Defaults to emptyList. */ - void setAllowedWarningHeadersRegex(List allowedWarningHeadersRegex) { + public void setAllowedWarningHeadersRegex(List allowedWarningHeadersRegex) { this.allowedWarningHeadersRegex = allowedWarningHeadersRegex; } @@ -343,7 +343,6 @@ public XContentLocation getLocation() { @Override public void execute(ClientYamlTestExecutionContext executionContext) throws IOException { - if ("param".equals(catchParam)) { // client should throw validation error before sending request // lets just return without doing anything as we don't have any client to test here @@ -359,17 +358,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx apiCallSection.getHeaders(), apiCallSection.getNodeSelector() ); - if (Strings.hasLength(catchParam)) { - String catchStatusCode; - if (CATCHES.containsKey(catchParam)) { - catchStatusCode = CATCHES.get(catchParam).v1(); - } else if (catchParam.startsWith("/") && catchParam.endsWith("/")) { - catchStatusCode = "4xx|5xx"; - } else { - throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); - } - fail(formatStatusCodeMessage(response, catchStatusCode)); - } + failIfHasCatch(response); final String testPath = executionContext.getClientYamlTestCandidate() != null ? 
executionContext.getClientYamlTestCandidate().getTestPath() : null; @@ -393,27 +382,23 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx } checkWarningHeaders(response.getWarningHeaders(), testPath); } catch (ClientYamlTestResponseException e) { - ClientYamlTestResponse restTestResponse = e.getRestTestResponse(); - if (Strings.hasLength(catchParam) == false) { - fail(formatStatusCodeMessage(restTestResponse, "2xx")); - } else if (CATCHES.containsKey(catchParam)) { - assertStatusCode(restTestResponse); - } else if (catchParam.length() > 2 && catchParam.startsWith("/") && catchParam.endsWith("/")) { - // the text of the error message matches regular expression - assertThat( - formatStatusCodeMessage(restTestResponse, "4xx|5xx"), - e.getResponseException().getResponse().getStatusLine().getStatusCode(), - greaterThanOrEqualTo(400) - ); - Object error = executionContext.response("error"); - assertThat("error was expected in the response", error, notNullValue()); - // remove delimiters from regex - String regex = catchParam.substring(1, catchParam.length() - 1); - assertThat("the error message was expected to match the provided regex but didn't", error.toString(), matches(regex)); - } else { - throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); - } + checkResponseException(e, executionContext); + } + } + + public void failIfHasCatch(ClientYamlTestResponse response) { + if (Strings.hasLength(catchParam) == false) { + return; + } + String catchStatusCode; + if (CATCHES.containsKey(catchParam)) { + catchStatusCode = CATCHES.get(catchParam).v1(); + } else if (catchParam.startsWith("/") && catchParam.endsWith("/")) { + catchStatusCode = "4xx|5xx"; + } else { + throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); } + fail(formatStatusCodeMessage(response, catchStatusCode)); } void checkElasticProductHeader(final List productHeaders) { @@ -448,7 +433,7 @@ void 
checkWarningHeaders(final List warningHeaders) { /** * Check that the response contains only the warning headers that we expect. */ - void checkWarningHeaders(final List warningHeaders, String testPath) { + public void checkWarningHeaders(final List warningHeaders, String testPath) { final List unexpected = new ArrayList<>(); final List unmatched = new ArrayList<>(); final List missing = new ArrayList<>(); @@ -536,6 +521,31 @@ void checkWarningHeaders(final List warningHeaders, String testPath) { } } + public void checkResponseException(ClientYamlTestResponseException e, ClientYamlTestExecutionContext executionContext) + throws IOException { + + ClientYamlTestResponse restTestResponse = e.getRestTestResponse(); + if (Strings.hasLength(catchParam) == false) { + fail(formatStatusCodeMessage(restTestResponse, "2xx")); + } else if (CATCHES.containsKey(catchParam)) { + assertStatusCode(restTestResponse); + } else if (catchParam.length() > 2 && catchParam.startsWith("/") && catchParam.endsWith("/")) { + // the text of the error message matches regular expression + assertThat( + formatStatusCodeMessage(restTestResponse, "4xx|5xx"), + e.getResponseException().getResponse().getStatusLine().getStatusCode(), + greaterThanOrEqualTo(400) + ); + Object error = executionContext.response("error"); + assertThat("error was expected in the response", error, notNullValue()); + // remove delimiters from regex + String regex = catchParam.substring(1, catchParam.length() - 1); + assertThat("the error message was expected to match the provided regex but didn't", error.toString(), matches(regex)); + } else { + throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); + } + } + private static void appendBadHeaders(final StringBuilder sb, final List headers, final String message) { if (headers.isEmpty() == false) { sb.append(message).append(" [\n"); diff --git 
a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java new file mode 100644 index 0000000000000..7f65a29e510b6 --- /dev/null +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java @@ -0,0 +1,305 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.test.rest.yaml.section; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.Features; +import org.elasticsearch.xcontent.XContentLocation; +import org.elasticsearch.xcontent.XContentParser; +import org.junit.AssumptionViolatedException; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +/** + * Represents a section where prerequisites to run a specific test section or suite are specified. 
It is possible to specify preconditions + * as a set of `skip` criteria (the test or suite will be skipped if the specified conditions are met) or `requires` criteria (the test or + * suite will be run only if the specified conditions are met) + * Criteria are based on: + * - the elasticsearch cluster version the tests are running against (deprecated) + * - the features supported by the elasticsearch cluster version the tests are running against + * - a specific test runner feature - some runners may not implement the whole set of features + * - an operating system (full name, including specific Linux distributions) - some OS might show a certain behavior + */ +public class PrerequisiteSection { + static class PrerequisiteSectionBuilder { + String skipVersionRange = null; + String skipReason = null; + List requiredYamlRunnerFeatures = new ArrayList<>(); + List skipOperatingSystems = new ArrayList<>(); + + enum XPackRequired { + NOT_SPECIFIED, + YES, + NO, + MISMATCHED + } + + XPackRequired xpackRequired = XPackRequired.NOT_SPECIFIED; + + public PrerequisiteSectionBuilder skipIfVersion(String skipVersionRange) { + this.skipVersionRange = skipVersionRange; + return this; + } + + public PrerequisiteSectionBuilder setSkipReason(String skipReason) { + this.skipReason = skipReason; + return this; + } + + public PrerequisiteSectionBuilder requireYamlRunnerFeature(String featureName) { + requiredYamlRunnerFeatures.add(featureName); + return this; + } + + public PrerequisiteSectionBuilder requireXPack() { + if (xpackRequired == XPackRequired.NO) { + xpackRequired = XPackRequired.MISMATCHED; + } else { + xpackRequired = XPackRequired.YES; + } + return this; + } + + public PrerequisiteSectionBuilder skipIfXPack() { + if (xpackRequired == XPackRequired.YES) { + xpackRequired = XPackRequired.MISMATCHED; + } else { + xpackRequired = XPackRequired.NO; + } + return this; + } + + public PrerequisiteSectionBuilder skipIfOs(String osName) { + this.skipOperatingSystems.add(osName); + 
return this; + } + + void validate(XContentLocation contentLocation) { + if ((Strings.hasLength(skipVersionRange) == false) + && requiredYamlRunnerFeatures.isEmpty() + && skipOperatingSystems.isEmpty() + && xpackRequired == XPackRequired.NOT_SPECIFIED) { + throw new ParsingException( + contentLocation, + "at least one criteria (version, cluster features, runner features, os) is mandatory within a skip section" + ); + } + if (Strings.hasLength(skipVersionRange) && Strings.hasLength(skipReason) == false) { + throw new ParsingException(contentLocation, "reason is mandatory within skip version section"); + } + if (skipOperatingSystems.isEmpty() == false && Strings.hasLength(skipReason) == false) { + throw new ParsingException(contentLocation, "reason is mandatory within skip os section"); + } + // make feature "skip_os" mandatory if os is given, this is a temporary solution until language client tests know about os + if (skipOperatingSystems.isEmpty() == false && requiredYamlRunnerFeatures.contains("skip_os") == false) { + throw new ParsingException(contentLocation, "if os is specified, test runner feature [skip_os] must be set"); + } + if (xpackRequired == XPackRequired.MISMATCHED) { + throw new ParsingException(contentLocation, "either [xpack] or [no_xpack] can be present, not both"); + } + } + + public PrerequisiteSection build() { + final List> skipCriteriaList = new ArrayList<>(); + final List> requiresCriteriaList; + + // Check if the test runner supports all YAML framework features (see {@link Features}). If not, default to always skip this + // section. 
+ if (Features.areAllSupported(requiredYamlRunnerFeatures) == false) { + requiresCriteriaList = List.of(Prerequisites.FALSE); + } else { + requiresCriteriaList = new ArrayList<>(); + if (xpackRequired == XPackRequired.YES) { + requiresCriteriaList.add(Prerequisites.hasXPack()); + } + if (xpackRequired == XPackRequired.NO) { + skipCriteriaList.add(Prerequisites.hasXPack()); + } + if (Strings.hasLength(skipVersionRange)) { + skipCriteriaList.add(Prerequisites.skipOnVersionRange(skipVersionRange)); + } + if (skipOperatingSystems.isEmpty() == false) { + skipCriteriaList.add(Prerequisites.skipOnOsList(skipOperatingSystems)); + } + } + return new PrerequisiteSection(skipCriteriaList, skipReason, requiresCriteriaList, null, requiredYamlRunnerFeatures); + } + } + + /** + * Parse a {@link PrerequisiteSection} if the next field is {@code skip}, otherwise returns {@link PrerequisiteSection#EMPTY}. + */ + public static PrerequisiteSection parseIfNext(XContentParser parser) throws IOException { + return parseInternal(parser).build(); + } + + private static void maybeAdvanceToNextField(XContentParser parser) throws IOException { + var token = parser.nextToken(); + if (token != null && token != XContentParser.Token.END_ARRAY) { + ParserUtils.advanceToFieldName(parser); + } + } + + static PrerequisiteSectionBuilder parseInternal(XContentParser parser) throws IOException { + PrerequisiteSectionBuilder builder = new PrerequisiteSectionBuilder(); + var hasPrerequisiteSection = false; + var unknownFieldName = false; + ParserUtils.advanceToFieldName(parser); + while (unknownFieldName == false) { + if ("skip".equals(parser.currentName())) { + parseSkipSection(parser, builder); + hasPrerequisiteSection = true; + maybeAdvanceToNextField(parser); + } else { + unknownFieldName = true; + } + } + if (hasPrerequisiteSection) { + builder.validate(parser.getTokenLocation()); + } + return builder; + } + + private static void parseFeatureField(String feature, PrerequisiteSectionBuilder builder) { 
+ // #31403 introduced YAML test "features" to indicate if the cluster being tested has xpack installed (`xpack`) + // or if it does *not* have xpack installed (`no_xpack`). These are not test runner features, so now that we have + // "modular" skip criteria let's separate them. Eventually, these should move to their own skip section. + if (feature.equals("xpack")) { + builder.requireXPack(); + } else if (feature.equals("no_xpack")) { + builder.skipIfXPack(); + } else { + builder.requireYamlRunnerFeature(feature); + } + } + + // package private for tests + static void parseSkipSection(XContentParser parser, PrerequisiteSectionBuilder builder) throws IOException { + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new IllegalArgumentException( + "Expected [" + + XContentParser.Token.START_OBJECT + + ", found [" + + parser.currentToken() + + "], the skip section is not properly indented" + ); + } + String currentFieldName = null; + XContentParser.Token token; + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("version".equals(currentFieldName)) { + builder.skipIfVersion(parser.text()); + } else if ("reason".equals(currentFieldName)) { + builder.setSkipReason(parser.text()); + } else if ("features".equals(currentFieldName)) { + parseFeatureField(parser.text(), builder); + } else if ("os".equals(currentFieldName)) { + builder.skipIfOs(parser.text()); + } else { + throw new ParsingException( + parser.getTokenLocation(), + "field " + currentFieldName + " not supported within skip section" + ); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if ("features".equals(currentFieldName)) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + parseFeatureField(parser.text(), builder); + } + } else if ("os".equals(currentFieldName)) { + while (parser.nextToken() != 
XContentParser.Token.END_ARRAY) { + builder.skipIfOs(parser.text()); + } + } + } + } + parser.nextToken(); + } + + public static final PrerequisiteSection EMPTY = new PrerequisiteSection(); + + private final List> skipCriteriaList; + private final List> requiresCriteriaList; + private final List yamlRunnerFeatures; + final String skipReason; + final String requireReason; + + private PrerequisiteSection() { + this.skipCriteriaList = new ArrayList<>(); + this.requiresCriteriaList = new ArrayList<>(); + this.yamlRunnerFeatures = new ArrayList<>(); + this.skipReason = null; + this.requireReason = null; + } + + PrerequisiteSection( + List> skipCriteriaList, + String skipReason, + List> requiresCriteriaList, + String requireReason, + List yamlRunnerFeatures + ) { + this.skipCriteriaList = skipCriteriaList; + this.requiresCriteriaList = requiresCriteriaList; + this.yamlRunnerFeatures = yamlRunnerFeatures; + this.skipReason = skipReason; + this.requireReason = requireReason; + } + + public boolean hasYamlRunnerFeature(String feature) { + return yamlRunnerFeatures.contains(feature); + } + + boolean skipCriteriaMet(ClientYamlTestExecutionContext context) { + return skipCriteriaList.stream().anyMatch(c -> c.test(context)); + } + + boolean requiresCriteriaMet(ClientYamlTestExecutionContext context) { + return requiresCriteriaList.stream().allMatch(c -> c.test(context)); + } + + public void evaluate(ClientYamlTestExecutionContext context, String testCandidateDescription) { + if (isEmpty()) { + return; + } + + if (requiresCriteriaMet(context) == false) { + throw new AssumptionViolatedException(buildMessage(testCandidateDescription, false)); + } + + if (skipCriteriaMet(context)) { + throw new AssumptionViolatedException(buildMessage(testCandidateDescription, true)); + } + } + + boolean isEmpty() { + return skipCriteriaList.isEmpty() && requiresCriteriaList.isEmpty() && yamlRunnerFeatures.isEmpty(); + } + + String buildMessage(String description, boolean isSkip) { + StringBuilder 
messageBuilder = new StringBuilder(); + messageBuilder.append("[").append(description).append("] skipped,"); + var reason = isSkip ? skipReason : requireReason; + if (Strings.isNullOrEmpty(reason) == false) { + messageBuilder.append(" reason: [").append(reason).append("]"); + } + if (yamlRunnerFeatures.isEmpty() == false) { + messageBuilder.append(" unsupported features ").append(yamlRunnerFeatures); + } + return messageBuilder.toString(); + } +} diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipCriteria.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java similarity index 54% rename from test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipCriteria.java rename to test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java index c864c778a8e76..8049c227b199e 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipCriteria.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java @@ -12,31 +12,36 @@ import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import java.util.List; +import java.util.Set; import java.util.function.Predicate; -public class SkipCriteria { +public class Prerequisites { - public static final Predicate SKIP_ALWAYS = context -> true; + public static final Predicate TRUE = context -> true; + public static final Predicate FALSE = context -> false; - private SkipCriteria() {} + private Prerequisites() {} - static Predicate fromVersionRange(String versionRange) { + static Predicate skipOnVersionRange(String versionRange) { final var versionRangePredicates = VersionRange.parseVersionRanges(versionRange); assert versionRangePredicates.isEmpty() == false; return context -> versionRangePredicates.stream().anyMatch(range -> range.test(context.nodesVersions())); } - static Predicate fromOsList(List 
operatingSystems) { + static Predicate skipOnOsList(List operatingSystems) { return context -> operatingSystems.stream().anyMatch(osName -> osName.equals(context.os())); } - static Predicate fromClusterModules(boolean xpackRequired) { + static Predicate hasXPack() { // TODO: change ESRestTestCase.hasXPack() to be context-specific - return context -> { - if (xpackRequired) { - return ESRestTestCase.hasXPack() == false; - } - return ESRestTestCase.hasXPack(); - }; + return context -> ESRestTestCase.hasXPack(); + } + + static Predicate requireClusterFeatures(Set clusterFeatures) { + return context -> clusterFeatures.stream().allMatch(context::clusterHasFeature); + } + + static Predicate skipOnClusterFeatures(Set clusterFeatures) { + return context -> clusterFeatures.stream().anyMatch(context::clusterHasFeature); } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SetupSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SetupSection.java index 351cfa8e40ebc..ecf37c4b5cf64 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SetupSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SetupSection.java @@ -36,7 +36,7 @@ static SetupSection parseIfNext(XContentParser parser) throws IOException { } public static SetupSection parse(XContentParser parser) throws IOException { - SkipSection skipSection = SkipSection.parseIfNext(parser); + PrerequisiteSection prerequisiteSection = PrerequisiteSection.parseIfNext(parser); List executableSections = new ArrayList<>(); while (parser.currentToken() != XContentParser.Token.END_ARRAY) { ParserUtils.advanceToFieldName(parser); @@ -51,21 +51,21 @@ public static SetupSection parse(XContentParser parser) throws IOException { parser.nextToken(); } parser.nextToken(); - return new SetupSection(skipSection, executableSections); + return new SetupSection(prerequisiteSection, 
executableSections); } - public static final SetupSection EMPTY = new SetupSection(SkipSection.EMPTY, Collections.emptyList()); + public static final SetupSection EMPTY = new SetupSection(PrerequisiteSection.EMPTY, Collections.emptyList()); - private final SkipSection skipSection; + private final PrerequisiteSection prerequisiteSection; private final List executableSections; - public SetupSection(SkipSection skipSection, List executableSections) { - this.skipSection = Objects.requireNonNull(skipSection, "skip section cannot be null"); + public SetupSection(PrerequisiteSection prerequisiteSection, List executableSections) { + this.prerequisiteSection = Objects.requireNonNull(prerequisiteSection, "skip section cannot be null"); this.executableSections = Collections.unmodifiableList(executableSections); } - public SkipSection getSkipSection() { - return skipSection; + public PrerequisiteSection getPrerequisiteSection() { + return prerequisiteSection; } public List getExecutableSections() { diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java deleted file mode 100644 index 4bd80fa4d9f13..0000000000000 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java +++ /dev/null @@ -1,255 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.test.rest.yaml.section; - -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; -import org.elasticsearch.test.rest.yaml.Features; -import org.elasticsearch.xcontent.XContentLocation; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Predicate; - -/** - * Represents a skip section that tells whether a specific test section or suite needs to be skipped - * based on: - * - the elasticsearch version the tests are running against - * - a specific test feature required that might not be implemented yet by the runner - * - an operating system (full name, including specific Linux distributions) that might show a certain behavior - */ -public class SkipSection { - - static class SkipSectionBuilder { - String version = null; - String reason = null; - List testFeatures = new ArrayList<>(); - List operatingSystems = new ArrayList<>(); - - enum XPackRequested { - NOT_SPECIFIED, - YES, - NO, - MISMATCHED - } - - XPackRequested xpackRequested = XPackRequested.NOT_SPECIFIED; - - public SkipSectionBuilder withVersion(String version) { - this.version = version; - return this; - } - - public SkipSectionBuilder withReason(String reason) { - this.reason = reason; - return this; - } - - public SkipSectionBuilder withTestFeature(String featureName) { - this.testFeatures.add(featureName); - return this; - } - - public void withXPack(boolean xpackRequired) { - if (xpackRequired && xpackRequested == XPackRequested.NO || xpackRequired == false && xpackRequested == XPackRequested.YES) { - xpackRequested = XPackRequested.MISMATCHED; - } else { - xpackRequested = xpackRequired ? 
XPackRequested.YES : XPackRequested.NO; - } - } - - public SkipSectionBuilder withOs(String osName) { - this.operatingSystems.add(osName); - return this; - } - - void validate(XContentLocation contentLocation) { - if ((Strings.hasLength(version) == false) - && testFeatures.isEmpty() - && operatingSystems.isEmpty() - && xpackRequested == XPackRequested.NOT_SPECIFIED) { - throw new ParsingException( - contentLocation, - "at least one criteria (version, test features, os) is mandatory within a skip section" - ); - } - if (Strings.hasLength(version) && Strings.hasLength(reason) == false) { - throw new ParsingException(contentLocation, "reason is mandatory within skip version section"); - } - if (operatingSystems.isEmpty() == false && Strings.hasLength(reason) == false) { - throw new ParsingException(contentLocation, "reason is mandatory within skip version section"); - } - // make feature "skip_os" mandatory if os is given, this is a temporary solution until language client tests know about os - if (operatingSystems.isEmpty() == false && testFeatures.contains("skip_os") == false) { - throw new ParsingException(contentLocation, "if os is specified, feature skip_os must be set"); - } - if (xpackRequested == XPackRequested.MISMATCHED) { - throw new ParsingException(contentLocation, "either `xpack` or `no_xpack` can be present, not both"); - } - } - - public SkipSection build() { - final List> skipCriteriaList; - - // Check if the test runner supports all YAML framework features (see {@link Features}). If not, default to always skip this - // section. 
- if (Features.areAllSupported(testFeatures) == false) { - skipCriteriaList = List.of(SkipCriteria.SKIP_ALWAYS); - } else { - skipCriteriaList = new ArrayList<>(); - if (xpackRequested == XPackRequested.YES || xpackRequested == XPackRequested.NO) { - skipCriteriaList.add(SkipCriteria.fromClusterModules(xpackRequested == XPackRequested.YES)); - } - if (Strings.hasLength(version)) { - skipCriteriaList.add(SkipCriteria.fromVersionRange(version)); - } - if (operatingSystems.isEmpty() == false) { - skipCriteriaList.add(SkipCriteria.fromOsList(operatingSystems)); - } - } - return new SkipSection(skipCriteriaList, testFeatures, reason); - } - } - - /** - * Parse a {@link SkipSection} if the next field is {@code skip}, otherwise returns {@link SkipSection#EMPTY}. - */ - public static SkipSection parseIfNext(XContentParser parser) throws IOException { - ParserUtils.advanceToFieldName(parser); - - if ("skip".equals(parser.currentName())) { - SkipSection section = parse(parser); - parser.nextToken(); - return section; - } - - return EMPTY; - } - - public static SkipSection parse(XContentParser parser) throws IOException { - return parseInternal(parser).build(); - } - - private static void parseFeature(String feature, SkipSectionBuilder builder) { - // #31403 introduced YAML test "features" to indicate if the cluster being tested has xpack installed (`xpack`) - // or if it does *not* have xpack installed (`no_xpack`). These are not test runner features, so now that we have - // "modular" skip criteria let's separate them. Eventually, these should move to their own skip section. 
- if (feature.equals("xpack")) { - builder.withXPack(true); - } else if (feature.equals("no_xpack")) { - builder.withXPack(false); - } else { - builder.withTestFeature(feature); - } - } - - // package private for tests - static SkipSectionBuilder parseInternal(XContentParser parser) throws IOException { - if (parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException( - "Expected [" - + XContentParser.Token.START_OBJECT - + ", found [" - + parser.currentToken() - + "], the skip section is not properly indented" - ); - } - String currentFieldName = null; - XContentParser.Token token; - - var builder = new SkipSectionBuilder(); - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("version".equals(currentFieldName)) { - builder.withVersion(parser.text()); - } else if ("reason".equals(currentFieldName)) { - builder.withReason(parser.text()); - } else if ("features".equals(currentFieldName)) { - parseFeature(parser.text(), builder); - } else if ("os".equals(currentFieldName)) { - builder.withOs(parser.text()); - } else { - throw new ParsingException( - parser.getTokenLocation(), - "field " + currentFieldName + " not supported within skip section" - ); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if ("features".equals(currentFieldName)) { - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - parseFeature(parser.text(), builder); - } - } else if ("os".equals(currentFieldName)) { - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - builder.withOs(parser.text()); - } - } - } - } - - parser.nextToken(); - builder.validate(parser.getTokenLocation()); - return builder; - } - - public static final SkipSection EMPTY = new SkipSection(); - - private final List> skipCriteriaList; - private final List yamlRunnerFeatures; - private final String 
reason; - - private SkipSection() { - this.skipCriteriaList = new ArrayList<>(); - this.yamlRunnerFeatures = new ArrayList<>(); - this.reason = null; - } - - SkipSection(List> skipCriteriaList, List yamlRunnerFeatures, String reason) { - this.skipCriteriaList = skipCriteriaList; - this.yamlRunnerFeatures = yamlRunnerFeatures; - this.reason = reason; - } - - public boolean yamlRunnerHasFeature(String feature) { - return yamlRunnerFeatures.contains(feature); - } - - public String getReason() { - return reason; - } - - public boolean skip(ClientYamlTestExecutionContext context) { - if (isEmpty()) { - return false; - } - - return skipCriteriaList.stream().anyMatch(c -> c.test(context)); - } - - public boolean isEmpty() { - return EMPTY.equals(this); - } - - public String getSkipMessage(String description) { - StringBuilder messageBuilder = new StringBuilder(); - messageBuilder.append("[").append(description).append("] skipped,"); - if (reason != null) { - messageBuilder.append(" reason: [").append(getReason()).append("]"); - } - if (yamlRunnerFeatures.isEmpty() == false) { - messageBuilder.append(" unsupported features ").append(yamlRunnerFeatures); - } - return messageBuilder.toString(); - } -} diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/TeardownSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/TeardownSection.java index 6821378463749..ca76ee92bb3c5 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/TeardownSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/TeardownSection.java @@ -35,7 +35,7 @@ static TeardownSection parseIfNext(XContentParser parser) throws IOException { } public static TeardownSection parse(XContentParser parser) throws IOException { - SkipSection skipSection = SkipSection.parseIfNext(parser); + PrerequisiteSection prerequisiteSection = PrerequisiteSection.parseIfNext(parser); 
List executableSections = new ArrayList<>(); while (parser.currentToken() != XContentParser.Token.END_ARRAY) { ParserUtils.advanceToFieldName(parser); @@ -50,21 +50,21 @@ public static TeardownSection parse(XContentParser parser) throws IOException { } parser.nextToken(); - return new TeardownSection(skipSection, executableSections); + return new TeardownSection(prerequisiteSection, executableSections); } - public static final TeardownSection EMPTY = new TeardownSection(SkipSection.EMPTY, Collections.emptyList()); + public static final TeardownSection EMPTY = new TeardownSection(PrerequisiteSection.EMPTY, Collections.emptyList()); - private final SkipSection skipSection; + private final PrerequisiteSection prerequisiteSection; private final List doSections; - TeardownSection(SkipSection skipSection, List doSections) { - this.skipSection = Objects.requireNonNull(skipSection, "skip section cannot be null"); + TeardownSection(PrerequisiteSection prerequisiteSection, List doSections) { + this.prerequisiteSection = Objects.requireNonNull(prerequisiteSection, "skip section cannot be null"); this.doSections = Collections.unmodifiableList(doSections); } - public SkipSection getSkipSection() { - return skipSection; + public PrerequisiteSection getPrerequisiteSection() { + return prerequisiteSection; } public List getDoSections() { diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java index 6e8397c816b3b..94b80fcc3fab3 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java @@ -29,6 +29,11 @@ private static class MockTestFeatureService implements TestFeatureService { public boolean clusterHasFeature(String featureId) 
{ return true; } + + @Override + public Set getAllSupportedFeatures() { + return Set.of(); + } } public void testHeadersSupportStashedValueReplacement() throws IOException { diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java index 0ee275fc89c15..2c6e7e30e0d46 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java @@ -69,7 +69,7 @@ public void testParseTestSectionWithDoSection() throws Exception { assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("First test section")); - assertThat(testSection.getSkipSection(), equalTo(SkipSection.EMPTY)); + assertThat(testSection.getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(testSection.getExecutableSections().size(), equalTo(1)); DoSection doSection = (DoSection) testSection.getExecutableSections().get(0); assertThat(doSection.getCatch(), equalTo("missing")); @@ -96,8 +96,8 @@ public void testParseTestSectionWithDoSetAndSkipSectionsNoSkip() throws Exceptio assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("First test section")); - assertThat(testSection.getSkipSection(), notNullValue()); - assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); + assertThat(testSection.getPrerequisiteSection(), notNullValue()); + assertThat(testSection.getPrerequisiteSection().skipReason, equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(testSection.getExecutableSections().size(), equalTo(2)); DoSection doSection = (DoSection) testSection.getExecutableSections().get(0); assertThat(doSection.getCatch(), 
equalTo("missing")); @@ -130,7 +130,7 @@ public void testParseTestSectionWithMultipleDoSections() throws Exception { assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("Basic")); - assertThat(testSection.getSkipSection(), equalTo(SkipSection.EMPTY)); + assertThat(testSection.getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(testSection.getExecutableSections().size(), equalTo(2)); DoSection doSection = (DoSection) testSection.getExecutableSections().get(0); assertThat(doSection.getCatch(), nullValue()); @@ -181,7 +181,7 @@ public void testParseTestSectionWithDoSectionsAndAssertions() throws Exception { assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("Basic")); - assertThat(testSection.getSkipSection(), equalTo(SkipSection.EMPTY)); + assertThat(testSection.getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(testSection.getExecutableSections().size(), equalTo(10)); DoSection doSection = (DoSection) testSection.getExecutableSections().get(0); diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java index c64a30378e9d6..edc043e15527d 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java @@ -34,6 +34,51 @@ import static org.hamcrest.Matchers.nullValue; public class ClientYamlTestSuiteTests extends AbstractClientYamlTestFragmentParserTestCase { + + public void testParseTestSetupWithSkip() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + --- + setup: + - skip: + version: "8.7.00 - 8.9.99" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + + 
--- + date: + - skip: + version: " - 8.1.99" + reason: tsdb indexing changed in 8.2.0 + - do: + indices.get_mapping: + index: test_index + + - match: {test_index.test_type.properties.text.type: string} + - match: {test_index.test_type.properties.text.analyzer: whitespace} + """); + + ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser); + + assertThat(restTestSuite, notNullValue()); + assertThat(restTestSuite.getName(), equalTo(getTestName())); + assertThat(restTestSuite.getFile().isPresent(), equalTo(false)); + assertThat(restTestSuite.getSetupSection(), notNullValue()); + + assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getSetupSection().getPrerequisiteSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getSetupSection().getExecutableSections().isEmpty(), equalTo(true)); + + assertThat(restTestSuite.getTestSections().size(), equalTo(1)); + + assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("date")); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(3)); + assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class)); + DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0); + assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_mapping")); + assertThat(doSection.getApiCallSection().getParams().size(), equalTo(1)); + assertThat(doSection.getApiCallSection().getParams().get("index"), equalTo("test_index")); + } + public void testParseTestSetupTeardownAndSections() throws Exception { final boolean includeSetup = randomBoolean(); final boolean includeTeardown = randomBoolean(); @@ -92,7 +137,7 @@ public void testParseTestSetupTeardownAndSections() 
throws Exception { assertThat(restTestSuite.getSetupSection(), notNullValue()); if (includeSetup) { assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(false)); - assertThat(restTestSuite.getSetupSection().getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getSetupSection().getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(restTestSuite.getSetupSection().getExecutableSections().size(), equalTo(1)); final ExecutableSection maybeDoSection = restTestSuite.getSetupSection().getExecutableSections().get(0); assertThat(maybeDoSection, instanceOf(DoSection.class)); @@ -107,7 +152,7 @@ public void testParseTestSetupTeardownAndSections() throws Exception { assertThat(restTestSuite.getTeardownSection(), notNullValue()); if (includeTeardown) { assertThat(restTestSuite.getTeardownSection().isEmpty(), equalTo(false)); - assertThat(restTestSuite.getTeardownSection().getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTeardownSection().getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(restTestSuite.getTeardownSection().getDoSections().size(), equalTo(1)); assertThat( ((DoSection) restTestSuite.getTeardownSection().getDoSections().get(0)).getApiCallSection().getApi(), @@ -128,7 +173,7 @@ public void testParseTestSetupTeardownAndSections() throws Exception { assertThat(restTestSuite.getTestSections().size(), equalTo(2)); assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Get index mapping")); - assertThat(restTestSuite.getTestSections().get(0).getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(3)); assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class)); DoSection doSection = (DoSection) 
restTestSuite.getTestSections().get(0).getExecutableSections().get(0); @@ -145,9 +190,9 @@ public void testParseTestSetupTeardownAndSections() throws Exception { assertThat(matchAssertion.getExpectedValue().toString(), equalTo("whitespace")); assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Get type mapping - pre 6.0")); - assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getTestSections().get(1).getPrerequisiteSection().isEmpty(), equalTo(false)); assertThat( - restTestSuite.getTestSections().get(1).getSkipSection().getReason(), + restTestSuite.getTestSections().get(1).getPrerequisiteSection().skipReason, equalTo("for newer versions the index name is always returned") ); @@ -209,7 +254,7 @@ public void testParseTestSingleTestSection() throws Exception { assertThat(restTestSuite.getTestSections().size(), equalTo(1)); assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Index with ID")); - assertThat(restTestSuite.getTestSections().get(0).getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(12)); assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class)); DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0); @@ -322,7 +367,7 @@ public void testParseTestMultipleTestSections() throws Exception { assertThat(restTestSuite.getTestSections().size(), equalTo(2)); assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Missing document (partial doc)")); - assertThat(restTestSuite.getTestSections().get(0).getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true)); 
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(2)); assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class)); @@ -339,7 +384,7 @@ public void testParseTestMultipleTestSections() throws Exception { assertThat(doSection.getApiCallSection().hasBody(), equalTo(true)); assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Missing document (script)")); - assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTestSections().get(1).getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(2)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(1), instanceOf(DoSection.class)); @@ -418,9 +463,9 @@ public void testParseSkipOs() throws Exception { assertThat(restTestSuite.getTestSections().size(), equalTo(1)); assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Broken on some os")); - assertThat(restTestSuite.getTestSections().get(0).getSkipSection().isEmpty(), equalTo(false)); - assertThat(restTestSuite.getTestSections().get(0).getSkipSection().getReason(), equalTo("not supported")); - assertThat(restTestSuite.getTestSections().get(0).getSkipSection().yamlRunnerHasFeature("skip_os"), equalTo(true)); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().skipReason, containsString("not supported")); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().hasYamlRunnerFeature("skip_os"), equalTo(true)); } public void testParseFileWithSingleTestSection() throws Exception { @@ -453,7 +498,7 @@ public void 
testParseFileWithSingleTestSection() throws Exception { assertThat(restTestSuite.getTestSections().size(), equalTo(1)); assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Index with ID")); - assertThat(restTestSuite.getTestSections().get(0).getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(2)); assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class)); DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0); @@ -473,7 +518,7 @@ public void testAddingDoWithoutSkips() { ClientYamlTestSection section = new ClientYamlTestSection( new XContentLocation(0, 0), "test", - SkipSection.EMPTY, + PrerequisiteSection.EMPTY, Collections.singletonList(doSection) ); ClientYamlTestSuite clientYamlTestSuite = new ClientYamlTestSuite( @@ -492,7 +537,7 @@ public void testAddingDoWithWarningWithoutSkipWarnings() { DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); doSection.setExpectedWarningHeaders(singletonList("foo")); doSection.setApiCallSection(new ApiCallSection("test")); - ClientYamlTestSuite testSuite = createTestSuite(SkipSection.EMPTY, doSection); + ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection); Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate); assertThat(e.getMessage(), containsString(Strings.format(""" api/name: @@ -506,7 +551,7 @@ public void testAddingDoWithWarningRegexWithoutSkipWarnings() { DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); doSection.setExpectedWarningHeadersRegex(singletonList(Pattern.compile("foo"))); doSection.setApiCallSection(new ApiCallSection("test")); - ClientYamlTestSuite testSuite = createTestSuite(SkipSection.EMPTY, 
doSection); + ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection); Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate); assertThat(e.getMessage(), containsString(Strings.format(""" api/name: @@ -520,7 +565,7 @@ public void testAddingDoWithAllowedWarningWithoutSkipAllowedWarnings() { DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); doSection.setAllowedWarningHeaders(singletonList("foo")); doSection.setApiCallSection(new ApiCallSection("test")); - ClientYamlTestSuite testSuite = createTestSuite(SkipSection.EMPTY, doSection); + ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection); Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate); assertThat(e.getMessage(), containsString(Strings.format(""" api/name: @@ -535,7 +580,7 @@ public void testAddingDoWithAllowedWarningRegexWithoutSkipAllowedWarnings() { DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); doSection.setAllowedWarningHeadersRegex(singletonList(Pattern.compile("foo"))); doSection.setApiCallSection(new ApiCallSection("test")); - ClientYamlTestSuite testSuite = createTestSuite(SkipSection.EMPTY, doSection); + ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection); Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate); assertThat(e.getMessage(), containsString(Strings.format(""" api/name: @@ -551,7 +596,7 @@ public void testAddingDoWithHeaderWithoutSkipHeaders() { ApiCallSection apiCallSection = new ApiCallSection("test"); apiCallSection.addHeaders(Collections.singletonMap("header", "value")); doSection.setApiCallSection(apiCallSection); - ClientYamlTestSuite testSuite = createTestSuite(SkipSection.EMPTY, doSection); + ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection); Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate); 
assertThat(e.getMessage(), containsString(Strings.format(""" api/name: @@ -566,7 +611,7 @@ public void testAddingDoWithNodeSelectorWithoutSkipNodeSelector() { ApiCallSection apiCall = new ApiCallSection("test"); apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); - ClientYamlTestSuite testSuite = createTestSuite(SkipSection.EMPTY, doSection); + ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection); Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate); assertThat(e.getMessage(), containsString(Strings.format(""" api/name: @@ -582,7 +627,7 @@ public void testAddingContainsWithoutSkipContains() { randomAlphaOfLength(randomIntBetween(3, 30)), randomDouble() ); - ClientYamlTestSuite testSuite = createTestSuite(SkipSection.EMPTY, containsAssertion); + ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, containsAssertion); Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate); assertThat(e.getMessage(), containsString(Strings.format(""" api/name: @@ -604,7 +649,7 @@ public void testMultipleValidationErrors() { new ClientYamlTestSection( new XContentLocation(0, 0), "section1", - SkipSection.EMPTY, + PrerequisiteSection.EMPTY, Collections.singletonList(containsAssertion) ) ); @@ -625,7 +670,7 @@ public void testMultipleValidationErrors() { doSection.setApiCallSection(apiCall); doSections.add(doSection); } - sections.add(new ClientYamlTestSection(new XContentLocation(0, 0), "section2", SkipSection.EMPTY, doSections)); + sections.add(new ClientYamlTestSection(new XContentLocation(0, 0), "section2", PrerequisiteSection.EMPTY, doSections)); ClientYamlTestSuite testSuite = new ClientYamlTestSuite( "api", @@ -648,13 +693,17 @@ public void testMultipleValidationErrors() { """, firstLineNumber, secondLineNumber, thirdLineNumber), e.getMessage()); } + private static PrerequisiteSection createPrerequisiteSection(String 
yamlTestRunnerFeature) { + return new PrerequisiteSection(emptyList(), null, emptyList(), null, singletonList(yamlTestRunnerFeature)); + } + public void testAddingDoWithWarningWithSkip() { int lineNumber = between(1, 10000); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); doSection.setExpectedWarningHeaders(singletonList("foo")); doSection.setApiCallSection(new ApiCallSection("test")); - SkipSection skipSection = new SkipSection(emptyList(), singletonList("warnings"), null); - createTestSuite(skipSection, doSection).validate(); + PrerequisiteSection prerequisiteSection = createPrerequisiteSection("warnings"); + createTestSuite(prerequisiteSection, doSection).validate(); } public void testAddingDoWithWarningRegexWithSkip() { @@ -662,86 +711,86 @@ public void testAddingDoWithWarningRegexWithSkip() { DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); doSection.setExpectedWarningHeadersRegex(singletonList(Pattern.compile("foo"))); doSection.setApiCallSection(new ApiCallSection("test")); - SkipSection skipSection = new SkipSection(emptyList(), singletonList("warnings_regex"), null); - createTestSuite(skipSection, doSection).validate(); + PrerequisiteSection prerequisiteSection = createPrerequisiteSection("warnings_regex"); + createTestSuite(prerequisiteSection, doSection).validate(); } public void testAddingDoWithNodeSelectorWithSkip() { int lineNumber = between(1, 10000); - SkipSection skipSection = new SkipSection(emptyList(), singletonList("node_selector"), null); + PrerequisiteSection prerequisiteSection = createPrerequisiteSection("node_selector"); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCall = new ApiCallSection("test"); apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); - createTestSuite(skipSection, doSection).validate(); + createTestSuite(prerequisiteSection, doSection).validate(); } public void 
testAddingDoWithHeadersWithSkip() { int lineNumber = between(1, 10000); - SkipSection skipSection = new SkipSection(emptyList(), singletonList("headers"), null); + PrerequisiteSection prerequisiteSection = createPrerequisiteSection("headers"); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCallSection = new ApiCallSection("test"); apiCallSection.addHeaders(singletonMap("foo", "bar")); doSection.setApiCallSection(apiCallSection); - createTestSuite(skipSection, doSection).validate(); + createTestSuite(prerequisiteSection, doSection).validate(); } public void testAddingContainsWithSkip() { int lineNumber = between(1, 10000); - SkipSection skipSection = new SkipSection(emptyList(), singletonList("contains"), null); + PrerequisiteSection prerequisiteSection = createPrerequisiteSection("contains"); ContainsAssertion containsAssertion = new ContainsAssertion( new XContentLocation(lineNumber, 0), randomAlphaOfLength(randomIntBetween(3, 30)), randomDouble() ); - createTestSuite(skipSection, containsAssertion).validate(); + createTestSuite(prerequisiteSection, containsAssertion).validate(); } public void testAddingCloseToWithSkip() { int lineNumber = between(1, 10000); - SkipSection skipSection = new SkipSection(emptyList(), singletonList("close_to"), null); + PrerequisiteSection prerequisiteSection = createPrerequisiteSection("close_to"); CloseToAssertion closeToAssertion = new CloseToAssertion( new XContentLocation(lineNumber, 0), randomAlphaOfLength(randomIntBetween(3, 30)), randomDouble(), randomDouble() ); - createTestSuite(skipSection, closeToAssertion).validate(); + createTestSuite(prerequisiteSection, closeToAssertion).validate(); } public void testAddingIsAfterWithSkip() { int lineNumber = between(1, 10000); - SkipSection skipSection = new SkipSection(emptyList(), singletonList("is_after"), null); + PrerequisiteSection prerequisiteSection = createPrerequisiteSection("is_after"); IsAfterAssertion isAfterAssertion = new 
IsAfterAssertion( new XContentLocation(lineNumber, 0), randomAlphaOfLength(randomIntBetween(3, 30)), randomInstantBetween(Instant.ofEpochSecond(0L), Instant.ofEpochSecond(3000000000L)) ); - createTestSuite(skipSection, isAfterAssertion).validate(); + createTestSuite(prerequisiteSection, isAfterAssertion).validate(); } - private static ClientYamlTestSuite createTestSuite(SkipSection skipSection, ExecutableSection executableSection) { + private static ClientYamlTestSuite createTestSuite(PrerequisiteSection prerequisiteSection, ExecutableSection executableSection) { final SetupSection setupSection; final TeardownSection teardownSection; final ClientYamlTestSection clientYamlTestSection; switch (randomIntBetween(0, 4)) { case 0 -> { - setupSection = new SetupSection(skipSection, Collections.emptyList()); + setupSection = new SetupSection(prerequisiteSection, Collections.emptyList()); teardownSection = TeardownSection.EMPTY; clientYamlTestSection = new ClientYamlTestSection( new XContentLocation(0, 0), "test", - SkipSection.EMPTY, + PrerequisiteSection.EMPTY, Collections.singletonList(executableSection) ); } case 1 -> { setupSection = SetupSection.EMPTY; - teardownSection = new TeardownSection(skipSection, Collections.emptyList()); + teardownSection = new TeardownSection(prerequisiteSection, Collections.emptyList()); clientYamlTestSection = new ClientYamlTestSection( new XContentLocation(0, 0), "test", - SkipSection.EMPTY, + PrerequisiteSection.EMPTY, Collections.singletonList(executableSection) ); } @@ -751,27 +800,27 @@ private static ClientYamlTestSuite createTestSuite(SkipSection skipSection, Exec clientYamlTestSection = new ClientYamlTestSection( new XContentLocation(0, 0), "test", - skipSection, + prerequisiteSection, Collections.singletonList(executableSection) ); } case 3 -> { - setupSection = new SetupSection(skipSection, Collections.singletonList(executableSection)); + setupSection = new SetupSection(prerequisiteSection, 
Collections.singletonList(executableSection)); teardownSection = TeardownSection.EMPTY; clientYamlTestSection = new ClientYamlTestSection( new XContentLocation(0, 0), "test", - SkipSection.EMPTY, + PrerequisiteSection.EMPTY, randomBoolean() ? Collections.emptyList() : Collections.singletonList(executableSection) ); } case 4 -> { setupSection = SetupSection.EMPTY; - teardownSection = new TeardownSection(skipSection, Collections.singletonList(executableSection)); + teardownSection = new TeardownSection(prerequisiteSection, Collections.singletonList(executableSection)); clientYamlTestSection = new ClientYamlTestSection( new XContentLocation(0, 0), "test", - SkipSection.EMPTY, + PrerequisiteSection.EMPTY, randomBoolean() ? Collections.emptyList() : Collections.singletonList(executableSection) ); } diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java new file mode 100644 index 0000000000000..b02658694d82f --- /dev/null +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java @@ -0,0 +1,539 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.rest.yaml.section; + +import org.elasticsearch.Version; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.yaml.YamlXContent; +import org.junit.AssumptionViolatedException; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyOrNullString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class PrerequisiteSectionTests extends AbstractClientYamlTestFragmentParserTestCase { + + public void testSkipVersionMultiRange() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnVersionRange("6.0.0 - 6.1.0, 7.1.0 - 7.5.0")), + "foobar", + emptyList(), + "foobar", + emptyList() + ); + + var outOfRangeMockContext = mock(ClientYamlTestExecutionContext.class); + when(outOfRangeMockContext.nodesVersions()).thenReturn(Set.of(Version.CURRENT.toString())) + .thenReturn(Set.of("6.2.0")) + .thenReturn(Set.of("7.0.0")) + .thenReturn(Set.of("7.6.0")); + + assertFalse(section.skipCriteriaMet(outOfRangeMockContext)); + assertFalse(section.skipCriteriaMet(outOfRangeMockContext)); + 
assertFalse(section.skipCriteriaMet(outOfRangeMockContext)); + assertFalse(section.skipCriteriaMet(outOfRangeMockContext)); + + var inRangeMockContext = mock(ClientYamlTestExecutionContext.class); + when(inRangeMockContext.nodesVersions()).thenReturn(Set.of("6.0.0")) + .thenReturn(Set.of("6.1.0")) + .thenReturn(Set.of("7.1.0")) + .thenReturn(Set.of("7.5.0")); + + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + } + + public void testSkipVersionMultiOpenRange() { + var section = new PrerequisiteSection( + List.of(Prerequisites.skipOnVersionRange("- 7.1.0, 7.2.0 - 7.5.0, 8.0.0 -")), + "foobar", + emptyList(), + "foobar", + emptyList() + ); + + var outOfRangeMockContext = mock(ClientYamlTestExecutionContext.class); + when(outOfRangeMockContext.nodesVersions()).thenReturn(Set.of("7.1.1")).thenReturn(Set.of("7.6.0")); + + assertFalse(section.skipCriteriaMet(outOfRangeMockContext)); + assertFalse(section.skipCriteriaMet(outOfRangeMockContext)); + + var inRangeMockContext = mock(ClientYamlTestExecutionContext.class); + when(inRangeMockContext.nodesVersions()).thenReturn(Set.of("7.0.0")) + .thenReturn(Set.of("7.3.0")) + .thenReturn(Set.of("8.0.0")) + .thenReturn(Set.of(Version.CURRENT.toString())); + + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + assertTrue(section.skipCriteriaMet(inRangeMockContext)); + } + + public void testSkipVersion() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnVersionRange("6.0.0 - 6.1.0")), + "foobar", + emptyList(), + "foobar", + emptyList() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + 
when(mockContext.nodesVersions()).thenReturn(Set.of(Version.CURRENT.toString())) + .thenReturn(Set.of("6.0.0")) + .thenReturn(Set.of("6.0.0", "6.1.0")) + .thenReturn(Set.of("6.0.0", "5.2.0")); + + assertFalse(section.skipCriteriaMet(mockContext)); + assertTrue(section.skipCriteriaMet(mockContext)); + assertTrue(section.skipCriteriaMet(mockContext)); + assertFalse(section.skipCriteriaMet(mockContext)); + } + + public void testSkipVersionWithTestFeatures() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnVersionRange("6.0.0 - 6.1.0")), + "foobar", + emptyList(), + "foobar", + singletonList("warnings") + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + when(mockContext.nodesVersions()).thenReturn(Set.of(Version.CURRENT.toString())).thenReturn(Set.of("6.0.0")); + + assertFalse(section.skipCriteriaMet(mockContext)); + assertTrue(section.skipCriteriaMet(mockContext)); + } + + public void testSkipTestFeatures() { + var section = new PrerequisiteSection.PrerequisiteSectionBuilder().requireYamlRunnerFeature("boom").build(); + assertFalse(section.requiresCriteriaMet(mock(ClientYamlTestExecutionContext.class))); + } + + public void testSkipTestFeaturesOverridesAnySkipCriteria() { + var section = new PrerequisiteSection.PrerequisiteSectionBuilder().skipIfOs("test-os").requireYamlRunnerFeature("boom").build(); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + when(mockContext.os()).thenReturn("test-os"); + + // Skip even if OS is matching + assertFalse(section.skipCriteriaMet(mockContext)); + assertFalse(section.requiresCriteriaMet(mockContext)); + } + + public void testSkipOs() { + PrerequisiteSection section = new PrerequisiteSection.PrerequisiteSectionBuilder().skipIfOs("windows95") + .skipIfOs("debian-5") + .build(); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + + when(mockContext.os()).thenReturn("debian-5"); + assertTrue(section.skipCriteriaMet(mockContext)); + + 
when(mockContext.os()).thenReturn("windows95"); + assertTrue(section.skipCriteriaMet(mockContext)); + + when(mockContext.os()).thenReturn("ms-dos"); + assertFalse(section.skipCriteriaMet(mockContext)); + + assertTrue(section.requiresCriteriaMet(mockContext)); + } + + public void testSkipOsWithTestFeatures() { + PrerequisiteSection section = new PrerequisiteSection.PrerequisiteSectionBuilder().requireYamlRunnerFeature("warnings") + .skipIfOs("windows95") + .skipIfOs("debian-5") + .build(); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + when(mockContext.os()).thenReturn("debian-5"); + assertTrue(section.skipCriteriaMet(mockContext)); + + when(mockContext.os()).thenReturn("windows95"); + assertTrue(section.skipCriteriaMet(mockContext)); + + when(mockContext.os()).thenReturn("ms-dos"); + assertFalse(section.skipCriteriaMet(mockContext)); + } + + public void testBuildMessage() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnVersionRange("6.0.0 - 6.1.0")), + "unsupported", + emptyList(), + "required", + singletonList("warnings") + ); + assertEquals("[FOOBAR] skipped, reason: [unsupported] unsupported features [warnings]", section.buildMessage("FOOBAR", true)); + assertEquals("[FOOBAR] skipped, reason: [required] unsupported features [warnings]", section.buildMessage("FOOBAR", false)); + section = new PrerequisiteSection(emptyList(), "unsupported", emptyList(), "required", emptyList()); + assertEquals("[FOOBAR] skipped, reason: [unsupported]", section.buildMessage("FOOBAR", true)); + assertEquals("[FOOBAR] skipped, reason: [required]", section.buildMessage("FOOBAR", false)); + section = new PrerequisiteSection(emptyList(), null, emptyList(), null, singletonList("warnings")); + assertEquals("[FOOBAR] skipped, unsupported features [warnings]", section.buildMessage("FOOBAR", true)); + assertEquals("[FOOBAR] skipped, unsupported features [warnings]", section.buildMessage("FOOBAR", false)); + } + + public void 
testParseNoPrerequisites() throws IOException { + parser = createParser(YamlXContent.yamlXContent, """ + do: + something + """); + var skipSectionBuilder = PrerequisiteSection.parseInternal(parser); + assertThat(skipSectionBuilder, notNullValue()); + + var skipSection = skipSectionBuilder.build(); + assertThat(skipSection.isEmpty(), equalTo(true)); + + // Ensure the input (bogus execute section) was not consumed + var next = ParserUtils.parseField(parser); + assertThat(next, notNullValue()); + assertThat(parser.nextToken(), nullValue()); + } + + public void testParseSkipSectionVersionNoFeature() throws Exception { + Version version = VersionUtils.randomVersion(random()); + parser = createParser(YamlXContent.yamlXContent, Strings.format(""" + version: " - %s" + reason: Delete ignores the parent param""", version)); + + var skipSectionBuilder = new PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, not(emptyOrNullString())); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures.size(), equalTo(0)); + assertThat(skipSectionBuilder.skipReason, equalTo("Delete ignores the parent param")); + } + + public void testParseSkipSectionFeatureNoVersion() throws Exception { + parser = createParser(YamlXContent.yamlXContent, "features: regex"); + + var skipSectionBuilder = new PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, emptyOrNullString()); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, contains("regex")); + assertThat(skipSectionBuilder.skipReason, nullValue()); + assertThat(skipSectionBuilder.xpackRequired, is(PrerequisiteSection.PrerequisiteSectionBuilder.XPackRequired.NOT_SPECIFIED)); + } + + public void 
testParseXPackFeature() throws IOException { + parser = createParser(YamlXContent.yamlXContent, "features: xpack"); + + var skipSectionBuilder = new PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, emptyOrNullString()); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, empty()); + assertThat(skipSectionBuilder.skipReason, nullValue()); + assertThat(skipSectionBuilder.xpackRequired, is(PrerequisiteSection.PrerequisiteSectionBuilder.XPackRequired.YES)); + } + + public void testParseNoXPackFeature() throws IOException { + parser = createParser(YamlXContent.yamlXContent, "features: no_xpack"); + + var skipSectionBuilder = new PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, emptyOrNullString()); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, empty()); + assertThat(skipSectionBuilder.skipReason, nullValue()); + assertThat(skipSectionBuilder.xpackRequired, is(PrerequisiteSection.PrerequisiteSectionBuilder.XPackRequired.NO)); + } + + public void testParseBothXPackFeatures() throws IOException { + parser = createParser(YamlXContent.yamlXContent, """ + skip: + features: [xpack, no_xpack] + """); + + var e = expectThrows(ParsingException.class, () -> PrerequisiteSection.parseInternal(parser)); + assertThat(e.getMessage(), containsString("either [xpack] or [no_xpack] can be present, not both")); + } + + public void testParseSkipSectionFeaturesNoVersion() throws Exception { + parser = createParser(YamlXContent.yamlXContent, "features: [regex1,regex2,regex3]"); + + var skipSectionBuilder = new PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + 
assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, emptyOrNullString()); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, contains("regex1", "regex2", "regex3")); + assertThat(skipSectionBuilder.skipReason, nullValue()); + } + + public void testParseSkipSectionBothFeatureAndVersion() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + version: " - 0.90.2" + features: regex + reason: Delete ignores the parent param"""); + + var skipSectionBuilder = new PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + assertThat(skipSectionBuilder.skipVersionRange, not(emptyOrNullString())); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, contains("regex")); + assertThat(skipSectionBuilder.skipReason, equalTo("Delete ignores the parent param")); + } + + public void testParseSkipSectionNoReason() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + skip: + version: " - 0.90.2" + """); + + Exception e = expectThrows(ParsingException.class, () -> PrerequisiteSection.parseInternal(parser)); + assertThat(e.getMessage(), is("reason is mandatory within skip version section")); + } + + public void testParseSkipSectionNoVersionNorFeature() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + skip: + reason: Delete ignores the parent param + """); + + Exception e = expectThrows(ParsingException.class, () -> PrerequisiteSection.parseInternal(parser)); + assertThat( + e.getMessage(), + is("at least one criteria (version, cluster features, runner features, os) is mandatory within a skip section") + ); + } + + public void testParseSkipSectionOsNoVersion() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + features: ["skip_os", "some_feature"] + os: debian-9 + reason: memory accounting broken, see gh#xyz + """); + + var skipSectionBuilder = new 
PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, emptyOrNullString()); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, hasSize(2)); + assertThat(skipSectionBuilder.skipOperatingSystems, contains("debian-9")); + assertThat(skipSectionBuilder.skipReason, is("memory accounting broken, see gh#xyz")); + } + + public void testParseSkipSectionOsListNoVersion() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + features: skip_os + os: [debian-9,windows-95,ms-dos] + reason: see gh#xyz + """); + + var skipSectionBuilder = new PrerequisiteSection.PrerequisiteSectionBuilder(); + PrerequisiteSection.parseSkipSection(parser, skipSectionBuilder); + assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, emptyOrNullString()); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, hasSize(1)); + assertThat(skipSectionBuilder.skipOperatingSystems, containsInAnyOrder("debian-9", "windows-95", "ms-dos")); + assertThat(skipSectionBuilder.skipReason, is("see gh#xyz")); + } + + public void testParseSkipSectionOsListTestFeaturesInRequires() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + - skip: + features: [skip_os] + os: [debian-9,windows-95,ms-dos] + reason: see gh#xyz + """); + + var skipSectionBuilder = PrerequisiteSection.parseInternal(parser); + assertThat(skipSectionBuilder, notNullValue()); + assertThat(skipSectionBuilder.skipVersionRange, emptyOrNullString()); + assertThat(skipSectionBuilder.requiredYamlRunnerFeatures, hasSize(1)); + assertThat(skipSectionBuilder.skipOperatingSystems, containsInAnyOrder("debian-9", "windows-95", "ms-dos")); + assertThat(skipSectionBuilder.skipReason, is("see gh#xyz")); + + assertThat(parser.currentToken(), equalTo(XContentParser.Token.END_ARRAY)); + 
assertThat(parser.nextToken(), nullValue()); + } + + public void testParseSkipSectionOsNoFeatureNoVersion() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + skip: + os: debian-9 + reason: memory accounting broken, see gh#xyz + """); + + Exception e = expectThrows(ParsingException.class, () -> PrerequisiteSection.parseInternal(parser)); + assertThat(e.getMessage(), is("if os is specified, test runner feature [skip_os] must be set")); + } + + public void testSkipClusterFeaturesAllRequiredMatch() { + PrerequisiteSection section = new PrerequisiteSection( + emptyList(), + "foobar", + List.of(Prerequisites.requireClusterFeatures(Set.of("required-feature-1", "required-feature-2"))), + "foobar", + emptyList() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + when(mockContext.clusterHasFeature("required-feature-1")).thenReturn(true); + when(mockContext.clusterHasFeature("required-feature-2")).thenReturn(true); + + assertFalse(section.skipCriteriaMet(mockContext)); + assertTrue(section.requiresCriteriaMet(mockContext)); + } + + public void testSkipClusterFeaturesSomeRequiredMatch() { + PrerequisiteSection section = new PrerequisiteSection( + emptyList(), + "foobar", + List.of(Prerequisites.requireClusterFeatures(Set.of("required-feature-1", "required-feature-2"))), + "foobar", + emptyList() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + when(mockContext.clusterHasFeature("required-feature-1")).thenReturn(true); + when(mockContext.clusterHasFeature("required-feature-2")).thenReturn(false); + + assertFalse(section.skipCriteriaMet(mockContext)); + assertFalse(section.requiresCriteriaMet(mockContext)); + } + + public void testSkipClusterFeaturesSomeToSkipMatch() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnClusterFeatures(Set.of("undesired-feature-1", "undesired-feature-2"))), + "foobar", + emptyList(), + "foobar", + emptyList() + ); + + var mockContext = 
mock(ClientYamlTestExecutionContext.class); + when(mockContext.clusterHasFeature("undesired-feature-1")).thenReturn(true); + + assertTrue(section.skipCriteriaMet(mockContext)); + } + + public void testSkipClusterFeaturesNoneToSkipMatch() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnClusterFeatures(Set.of("undesired-feature-1", "undesired-feature-2"))), + "foobar", + emptyList(), + "foobar", + emptyList() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + assertFalse(section.skipCriteriaMet(mockContext)); + } + + public void testSkipClusterFeaturesAllRequiredSomeToSkipMatch() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnClusterFeatures(Set.of("undesired-feature-1", "undesired-feature-2"))), + "foobar", + List.of(Prerequisites.requireClusterFeatures(Set.of("required-feature-1", "required-feature-2"))), + "foobar", + emptyList() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + when(mockContext.clusterHasFeature("required-feature-1")).thenReturn(true); + when(mockContext.clusterHasFeature("required-feature-2")).thenReturn(true); + when(mockContext.clusterHasFeature("undesired-feature-1")).thenReturn(true); + + assertTrue(section.skipCriteriaMet(mockContext)); + assertTrue(section.requiresCriteriaMet(mockContext)); + } + + public void testSkipClusterFeaturesAllRequiredNoneToSkipMatch() { + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipOnClusterFeatures(Set.of("undesired-feature-1", "undesired-feature-2"))), + "foobar", + List.of(Prerequisites.requireClusterFeatures(Set.of("required-feature-1", "required-feature-2"))), + "foobar", + emptyList() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + when(mockContext.clusterHasFeature("required-feature-1")).thenReturn(true); + when(mockContext.clusterHasFeature("required-feature-2")).thenReturn(true); + + 
assertFalse(section.skipCriteriaMet(mockContext)); + assertTrue(section.requiresCriteriaMet(mockContext)); + } + + public void evaluateEmpty() { + var section = new PrerequisiteSection(List.of(), "unsupported", List.of(), "required", List.of()); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + section.evaluate(mockContext, "TEST"); + } + + public void evaluateRequiresCriteriaTrue() { + var section = new PrerequisiteSection(List.of(), "unsupported", List.of(Prerequisites.TRUE), "required", List.of()); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + section.evaluate(mockContext, "TEST"); + } + + public void evaluateSkipCriteriaFalse() { + var section = new PrerequisiteSection(List.of(Prerequisites.FALSE), "unsupported", List.of(), "required", List.of()); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + section.evaluate(mockContext, "TEST"); + } + + public void evaluateRequiresCriteriaFalse() { + var section = new PrerequisiteSection( + List.of(Prerequisites.FALSE), + "unsupported", + List.of(Prerequisites.FALSE), + "required", + List.of() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + var e = expectThrows(AssumptionViolatedException.class, () -> section.evaluate(mockContext, "TEST")); + assertThat(e.getMessage(), equalTo("[TEST] skipped, reason: [required]")); + } + + public void evaluateSkipCriteriaTrue() { + var section = new PrerequisiteSection( + List.of(Prerequisites.TRUE), + "unsupported", + List.of(Prerequisites.TRUE), + "required", + List.of() + ); + + var mockContext = mock(ClientYamlTestExecutionContext.class); + var e = expectThrows(AssumptionViolatedException.class, () -> section.evaluate(mockContext, "TEST")); + assertThat(e.getMessage(), equalTo("[TEST] skipped, reason: [unsupported]")); + } +} diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java 
b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java index 53aaf99d7e272..78c31c85178a6 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java @@ -37,7 +37,7 @@ public void testParseSetupSection() throws Exception { SetupSection setupSection = SetupSection.parse(parser); assertThat(setupSection, notNullValue()); - assertThat(setupSection.getSkipSection().isEmpty(), equalTo(true)); + assertThat(setupSection.getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(setupSection.getExecutableSections().size(), equalTo(2)); assertThat(setupSection.getExecutableSections().get(0), instanceOf(DoSection.class)); assertThat(((DoSection) setupSection.getExecutableSections().get(0)).getApiCallSection().getApi(), equalTo("index1")); @@ -60,7 +60,7 @@ public void testParseSetSectionInSetupSection() throws IOException { final SetupSection setupSection = SetupSection.parse(parser); assertNotNull(setupSection); - assertTrue(setupSection.getSkipSection().isEmpty()); + assertTrue(setupSection.getPrerequisiteSection().isEmpty()); assertThat(setupSection.getExecutableSections().size(), equalTo(5)); assertThat(setupSection.getExecutableSections().get(0), instanceOf(DoSection.class)); assertThat(((DoSection) setupSection.getExecutableSections().get(0)).getApiCallSection().getApi(), equalTo("cluster.state")); @@ -105,9 +105,9 @@ public void testParseSetupAndSkipSectionNoSkip() throws Exception { SetupSection setupSection = SetupSection.parse(parser); assertThat(setupSection, notNullValue()); - assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false)); - assertThat(setupSection.getSkipSection(), notNullValue()); - assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); + 
assertThat(setupSection.getPrerequisiteSection().isEmpty(), equalTo(false)); + assertThat(setupSection.getPrerequisiteSection(), notNullValue()); + assertThat(setupSection.getPrerequisiteSection().skipReason, equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(setupSection.getExecutableSections().size(), equalTo(2)); assertThat(setupSection.getExecutableSections().get(0), instanceOf(DoSection.class)); assertThat(((DoSection) setupSection.getExecutableSections().get(0)).getApiCallSection().getApi(), equalTo("index1")); diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java deleted file mode 100644 index bd1f8fa758499..0000000000000 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java +++ /dev/null @@ -1,311 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.test.rest.yaml.section; - -import org.elasticsearch.Version; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.core.Strings; -import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; -import org.elasticsearch.xcontent.yaml.YamlXContent; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Set; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyOrNullString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCase { - - public void testSkipVersionMultiRange() { - SkipSection section = new SkipSection( - List.of(SkipCriteria.fromVersionRange("6.0.0 - 6.1.0, 7.1.0 - 7.5.0")), - Collections.emptyList(), - "foobar" - ); - - var outOfRangeMockContext = mock(ClientYamlTestExecutionContext.class); - when(outOfRangeMockContext.nodesVersions()).thenReturn(Set.of(Version.CURRENT.toString())) - .thenReturn(Set.of("6.2.0")) - .thenReturn(Set.of("7.0.0")) - .thenReturn(Set.of("7.6.0")); - - assertFalse(section.skip(outOfRangeMockContext)); - assertFalse(section.skip(outOfRangeMockContext)); - assertFalse(section.skip(outOfRangeMockContext)); - assertFalse(section.skip(outOfRangeMockContext)); - - var inRangeMockContext = mock(ClientYamlTestExecutionContext.class); - when(inRangeMockContext.nodesVersions()).thenReturn(Set.of("6.0.0")) - 
.thenReturn(Set.of("6.1.0")) - .thenReturn(Set.of("7.1.0")) - .thenReturn(Set.of("7.5.0")); - - assertTrue(section.skip(inRangeMockContext)); - assertTrue(section.skip(inRangeMockContext)); - assertTrue(section.skip(inRangeMockContext)); - assertTrue(section.skip(inRangeMockContext)); - } - - public void testSkipVersionMultiOpenRange() { - var section = new SkipSection( - List.of(SkipCriteria.fromVersionRange("- 7.1.0, 7.2.0 - 7.5.0, 8.0.0 -")), - Collections.emptyList(), - "foobar" - ); - - var outOfRangeMockContext = mock(ClientYamlTestExecutionContext.class); - when(outOfRangeMockContext.nodesVersions()).thenReturn(Set.of("7.1.1")).thenReturn(Set.of("7.6.0")); - - assertFalse(section.skip(outOfRangeMockContext)); - assertFalse(section.skip(outOfRangeMockContext)); - - var inRangeMockContext = mock(ClientYamlTestExecutionContext.class); - when(inRangeMockContext.nodesVersions()).thenReturn(Set.of("7.0.0")) - .thenReturn(Set.of("7.3.0")) - .thenReturn(Set.of("8.0.0")) - .thenReturn(Set.of(Version.CURRENT.toString())); - - assertTrue(section.skip(inRangeMockContext)); - assertTrue(section.skip(inRangeMockContext)); - assertTrue(section.skip(inRangeMockContext)); - assertTrue(section.skip(inRangeMockContext)); - } - - public void testSkipVersion() { - SkipSection section = new SkipSection(List.of(SkipCriteria.fromVersionRange("6.0.0 - 6.1.0")), Collections.emptyList(), "foobar"); - - var mockContext = mock(ClientYamlTestExecutionContext.class); - when(mockContext.nodesVersions()).thenReturn(Set.of(Version.CURRENT.toString())) - .thenReturn(Set.of("6.0.0")) - .thenReturn(Set.of("6.0.0", "6.1.0")) - .thenReturn(Set.of("6.0.0", "5.2.0")); - - assertFalse(section.skip(mockContext)); - assertTrue(section.skip(mockContext)); - assertTrue(section.skip(mockContext)); - assertFalse(section.skip(mockContext)); - } - - public void testSkipVersionWithTestFeatures() { - SkipSection section = new SkipSection( - List.of(SkipCriteria.fromVersionRange("6.0.0 - 6.1.0")), - 
Collections.singletonList("warnings"), - "foobar" - ); - - var mockContext = mock(ClientYamlTestExecutionContext.class); - when(mockContext.nodesVersions()).thenReturn(Set.of(Version.CURRENT.toString())).thenReturn(Set.of("6.0.0")); - - assertFalse(section.skip(mockContext)); - assertTrue(section.skip(mockContext)); - } - - public void testSkipTestFeatures() { - var section = new SkipSection.SkipSectionBuilder().withTestFeature("boom").build(); - assertTrue(section.skip(mock(ClientYamlTestExecutionContext.class))); - } - - public void testSkipTestFeaturesOverridesAnySkipCriteria() { - var section = new SkipSection.SkipSectionBuilder().withOs("test-os").withTestFeature("boom").build(); - - var mockContext = mock(ClientYamlTestExecutionContext.class); - when(mockContext.os()).thenReturn("test-os"); - - // Skip even if OS is matching - assertTrue(section.skip(mockContext)); - } - - public void testSkipOs() { - SkipSection section = new SkipSection.SkipSectionBuilder().withOs("windows95").withOs("debian-5").build(); - - var mockContext = mock(ClientYamlTestExecutionContext.class); - - when(mockContext.os()).thenReturn("debian-5"); - assertTrue(section.skip(mockContext)); - - when(mockContext.os()).thenReturn("windows95"); - assertTrue(section.skip(mockContext)); - - when(mockContext.os()).thenReturn("ms-dos"); - assertFalse(section.skip(mockContext)); - } - - public void testSkipOsWithTestFeatures() { - SkipSection section = new SkipSection.SkipSectionBuilder().withTestFeature("warnings") - .withOs("windows95") - .withOs("debian-5") - .build(); - - var mockContext = mock(ClientYamlTestExecutionContext.class); - when(mockContext.os()).thenReturn("debian-5"); - assertTrue(section.skip(mockContext)); - - when(mockContext.os()).thenReturn("windows95"); - assertTrue(section.skip(mockContext)); - - when(mockContext.os()).thenReturn("ms-dos"); - assertFalse(section.skip(mockContext)); - } - - public void testMessage() { - SkipSection section = new SkipSection( - 
List.of(SkipCriteria.fromVersionRange("6.0.0 - 6.1.0")), - Collections.singletonList("warnings"), - "foobar" - ); - assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR")); - section = new SkipSection(List.of(), Collections.singletonList("warnings"), "foobar"); - assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR")); - section = new SkipSection(List.of(), Collections.singletonList("warnings"), null); - assertEquals("[FOOBAR] skipped, unsupported features [warnings]", section.getSkipMessage("FOOBAR")); - } - - public void testParseSkipSectionVersionNoFeature() throws Exception { - Version version = VersionUtils.randomVersion(random()); - parser = createParser(YamlXContent.yamlXContent, Strings.format(""" - version: " - %s" - reason: Delete ignores the parent param""", version)); - - var skipSectionBuilder = SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder, notNullValue()); - assertThat(skipSectionBuilder.version, not(emptyOrNullString())); - assertThat(skipSectionBuilder.testFeatures.size(), equalTo(0)); - assertThat(skipSectionBuilder.reason, equalTo("Delete ignores the parent param")); - } - - public void testParseSkipSectionFeatureNoVersion() throws Exception { - parser = createParser(YamlXContent.yamlXContent, "features: regex"); - - var skipSectionBuilder = SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder, notNullValue()); - assertThat(skipSectionBuilder.version, emptyOrNullString()); - assertThat(skipSectionBuilder.testFeatures, contains("regex")); - assertThat(skipSectionBuilder.reason, nullValue()); - assertThat(skipSectionBuilder.xpackRequested, is(SkipSection.SkipSectionBuilder.XPackRequested.NOT_SPECIFIED)); - } - - public void testParseXPackFeature() throws IOException { - parser = createParser(YamlXContent.yamlXContent, "features: xpack"); - - var skipSectionBuilder = 
SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder, notNullValue()); - assertThat(skipSectionBuilder.version, emptyOrNullString()); - assertThat(skipSectionBuilder.testFeatures, empty()); - assertThat(skipSectionBuilder.reason, nullValue()); - assertThat(skipSectionBuilder.xpackRequested, is(SkipSection.SkipSectionBuilder.XPackRequested.YES)); - } - - public void testParseNoXPackFeature() throws IOException { - parser = createParser(YamlXContent.yamlXContent, "features: no_xpack"); - - var skipSectionBuilder = SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder, notNullValue()); - assertThat(skipSectionBuilder.version, emptyOrNullString()); - assertThat(skipSectionBuilder.testFeatures, empty()); - assertThat(skipSectionBuilder.reason, nullValue()); - assertThat(skipSectionBuilder.xpackRequested, is(SkipSection.SkipSectionBuilder.XPackRequested.NO)); - } - - public void testParseBothXPackFeatures() throws IOException { - parser = createParser(YamlXContent.yamlXContent, "features: [xpack, no_xpack]"); - - var e = expectThrows(ParsingException.class, () -> SkipSection.parseInternal(parser)); - assertThat(e.getMessage(), containsString("either `xpack` or `no_xpack` can be present, not both")); - } - - public void testParseSkipSectionFeaturesNoVersion() throws Exception { - parser = createParser(YamlXContent.yamlXContent, "features: [regex1,regex2,regex3]"); - - var skipSectionBuilder = SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder, notNullValue()); - assertThat(skipSectionBuilder.version, emptyOrNullString()); - assertThat(skipSectionBuilder.testFeatures, contains("regex1", "regex2", "regex3")); - assertThat(skipSectionBuilder.reason, nullValue()); - } - - public void testParseSkipSectionBothFeatureAndVersion() throws Exception { - parser = createParser(YamlXContent.yamlXContent, """ - version: " - 0.90.2" - features: regex - reason: Delete ignores the parent param"""); - - var skipSectionBuilder = 
SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder.version, not(emptyOrNullString())); - assertThat(skipSectionBuilder.testFeatures, contains("regex")); - assertThat(skipSectionBuilder.reason, equalTo("Delete ignores the parent param")); - } - - public void testParseSkipSectionNoReason() throws Exception { - parser = createParser(YamlXContent.yamlXContent, "version: \" - 0.90.2\"\n"); - - Exception e = expectThrows(ParsingException.class, () -> SkipSection.parseInternal(parser)); - assertThat(e.getMessage(), is("reason is mandatory within skip version section")); - } - - public void testParseSkipSectionNoVersionNorFeature() throws Exception { - parser = createParser(YamlXContent.yamlXContent, "reason: Delete ignores the parent param\n"); - - Exception e = expectThrows(ParsingException.class, () -> SkipSection.parseInternal(parser)); - assertThat(e.getMessage(), is("at least one criteria (version, test features, os) is mandatory within a skip section")); - } - - public void testParseSkipSectionOsNoVersion() throws Exception { - parser = createParser(YamlXContent.yamlXContent, """ - features: ["skip_os", "some_feature"] - os: debian-9 - reason: memory accounting broken, see gh#xyz - """); - - var skipSectionBuilder = SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder, notNullValue()); - assertThat(skipSectionBuilder.version, emptyOrNullString()); - assertThat(skipSectionBuilder.testFeatures, hasSize(2)); - assertThat(skipSectionBuilder.operatingSystems, contains("debian-9")); - assertThat(skipSectionBuilder.reason, is("memory accounting broken, see gh#xyz")); - } - - public void testParseSkipSectionOsListNoVersion() throws Exception { - parser = createParser(YamlXContent.yamlXContent, """ - features: skip_os - os: [debian-9,windows-95,ms-dos] - reason: see gh#xyz - """); - - var skipSectionBuilder = SkipSection.parseInternal(parser); - assertThat(skipSectionBuilder, notNullValue()); - assertThat(skipSectionBuilder.version, 
emptyOrNullString()); - assertThat(skipSectionBuilder.testFeatures, hasSize(1)); - assertThat(skipSectionBuilder.operatingSystems, containsInAnyOrder("debian-9", "windows-95", "ms-dos")); - assertThat(skipSectionBuilder.reason, is("see gh#xyz")); - } - - public void testParseSkipSectionOsNoFeatureNoVersion() throws Exception { - parser = createParser(YamlXContent.yamlXContent, """ - os: debian-9 - reason: memory accounting broken, see gh#xyz - """); - - Exception e = expectThrows(ParsingException.class, () -> SkipSection.parseInternal(parser)); - assertThat(e.getMessage(), is("if os is specified, feature skip_os must be set")); - } -} diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java index 2c6b4f5be12de..9844b90eb2148 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java @@ -35,7 +35,7 @@ public void testParseTeardownSection() throws Exception { TeardownSection section = TeardownSection.parse(parser); assertThat(section, notNullValue()); - assertThat(section.getSkipSection().isEmpty(), equalTo(true)); + assertThat(section.getPrerequisiteSection().isEmpty(), equalTo(true)); assertThat(section.getDoSections().size(), equalTo(2)); assertThat(((DoSection) section.getDoSections().get(0)).getApiCallSection().getApi(), equalTo("delete")); assertThat(((DoSection) section.getDoSections().get(1)).getApiCallSection().getApi(), equalTo("delete2")); @@ -62,8 +62,8 @@ public void testParseWithSkip() throws Exception { TeardownSection section = TeardownSection.parse(parser); assertThat(section, notNullValue()); - assertThat(section.getSkipSection().isEmpty(), equalTo(false)); - assertThat(section.getSkipSection().getReason(), equalTo("there is 
a reason")); + assertThat(section.getPrerequisiteSection().isEmpty(), equalTo(false)); + assertThat(section.getPrerequisiteSection().skipReason, equalTo("there is a reason")); assertThat(section.getDoSections().size(), equalTo(2)); assertThat(((DoSection) section.getDoSections().get(0)).getApiCallSection().getApi(), equalTo("delete")); assertThat(((DoSection) section.getDoSections().get(1)).getApiCallSection().getApi(), equalTo("delete2")); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java index b0e8b8ae05b51..61917220f10d1 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java @@ -83,7 +83,7 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu public BoxplotAggregationBuilder(StreamInput in) throws IOException { super(in); compression = in.readDouble(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { executionHint = in.readOptionalWriteable(TDigestExecutionHint::readFrom); } else { executionHint = TDigestExecutionHint.HIGH_ACCURACY; @@ -98,7 +98,7 @@ public Set metricNames() { @Override protected void innerWriteTo(StreamOutput out) throws IOException { out.writeDouble(compression); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(executionHint); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java index c100d57dfb3d1..e71cedf381886 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java @@ -59,7 +59,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe } List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); aggs.add(new InternalSimpleLongValue(name(), cardinality, formatter, metadata())); Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index a06eb509d2539..421973723837d 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -394,7 +394,7 @@ public void parse(DocumentParserContext context) throws IOException { ); } else if (count > 0) { // we do not add elements with count == 0 - if (streamOutput.getTransportVersion().onOrAfter(TransportVersions.LONG_COUNT_IN_HISTOGRAM_ADDED)) { + if (streamOutput.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { streamOutput.writeVLong(count); } else { streamOutput.writeVInt(Math.toIntExact(count)); @@ -455,7 +455,7 @@ void reset(BytesRef bytesRef) { @Override public boolean next() throws IOException { if (streamInput.available() > 0) { - if 
(streamInput.getTransportVersion().onOrAfter(TransportVersions.LONG_COUNT_IN_HISTOGRAM_ADDED)) { + if (streamInput.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { count = streamInput.readVLong(); } else { count = streamInput.readVInt(); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java index 3dc364b1ec131..663299df54f8b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java @@ -101,11 +101,7 @@ private void reduceTDigest( } if (state != null) { - List aggs = bucket.getAggregations() - .asList() - .stream() - .map((p) -> (InternalAggregation) p) - .collect(Collectors.toList()); + List aggs = bucket.getAggregations().asList().stream().collect(Collectors.toList()); aggs.add(new InternalTDigestPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); } @@ -151,11 +147,7 @@ private void reduceHDR( } if (state != null) { - List aggs = bucket.getAggregations() - .asList() - .stream() - .map((p) -> (InternalAggregation) p) - .collect(Collectors.toList()); + List aggs = new ArrayList<>(bucket.getAggregations().asList()); aggs.add(new InternalHDRPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 4b983f24c5466..4aa80ae907868 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -14,7 +14,6 @@ import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -118,7 +117,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return aggregations; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java index edbd750cdcc52..adb8b691a83ea 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java @@ -71,7 +71,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe } List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); aggs.add(new InternalSimpleValue(name(), normalizedBucketValue, formatter, metadata())); InternalMultiBucketAggregation.InternalBucket newBucket = 
originalAgg.createBucket(InternalAggregations.from(aggs), bucket); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java index f3af195bc6fa1..dc4b096f3a08e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java @@ -63,7 +63,7 @@ public InternalResetTrackingRate(StreamInput in) throws IOException { this.startTime = in.readLong(); this.endTime = in.readLong(); this.resetCompensation = in.readDouble(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.rateUnit = Rounding.DateTimeUnit.resolve(in.readByte()); } else { this.rateUnit = Rounding.DateTimeUnit.SECOND_OF_MINUTE; @@ -82,7 +82,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeLong(startTime); out.writeLong(endTime); out.writeDouble(resetCompensation); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) && rateUnit != null) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) && rateUnit != null) { out.writeByte(rateUnit.getId()); } else { out.writeByte(Rounding.DateTimeUnit.SECOND_OF_MINUTE.getId()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java index 6fd6ce81b32a2..0ad11c0395ebd 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java @@ -10,16 
+10,11 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.metrics.TDigestState; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; import java.io.IOException; @@ -61,11 +56,6 @@ protected void assertReduced(InternalBoxplot reduced, List inpu assertEquals(expected.getMin(), reduced.getMin(), 0); } - @Override - protected void assertFromXContent(InternalBoxplot min, ParsedAggregation parsedAggregation) { - // There is no ParsedBoxplot yet so we cannot test it here - } - @Override protected InternalBoxplot mutateInstance(InternalBoxplot instance) { String name = instance.getName(); @@ -96,17 +86,6 @@ protected InternalBoxplot mutateInstance(InternalBoxplot instance) { return new InternalBoxplot(name, state, formatter, metadata); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(BoxplotAggregationBuilder.NAME), (p, c) -> { - assumeTrue("There is no ParsedBoxlot yet", false); - return null; - }) - ); - } - public void testIQR() { double epsilon = 0.00001; // tolerance on equality for doubles TDigestState state = TDigestState.create(100); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java index 0a91a56883834..361d28a5c0af8 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; @@ -18,16 +17,12 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; import java.util.ArrayList; @@ -265,11 +260,6 @@ protected void assertReduced(InternalMultiTerms reduced, List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(MultiTermsAggregationBuilder.NAME), (p, c) -> { - assumeTrue("There is no ParsedMultiTerms yet", false); - return null; - }) - 
); - } - public void testKeyConverters() { assertThat( UNSIGNED_LONG.toDouble(UNSIGNED_LONG_SHIFTED, UNSIGNED_LONG_SHIFTED.parseLong("123", false, () -> 0L)), diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java index 1d497d1856285..2d61132b34b6f 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java @@ -7,17 +7,12 @@ package org.elasticsearch.xpack.analytics.rate; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; import java.util.ArrayList; @@ -71,11 +66,6 @@ protected void assertReduced(InternalRate reduced, List inputs) { assertEquals(expected, reduced.getValue(), 0.00001); } - @Override - protected void assertFromXContent(InternalRate min, ParsedAggregation parsedAggregation) { - // There is no ParsedRate yet so we cannot test it here - } - @Override protected InternalRate mutateInstance(InternalRate instance) { String name = instance.getName(); @@ -99,15 +89,4 @@ protected InternalRate mutateInstance(InternalRate instance) { } return new InternalRate(name, sum, divider, formatter, metadata); } - - @Override - protected List 
getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(RateAggregationBuilder.NAME), (p, c) -> { - assumeTrue("There is no ParsedRate yet", false); - return null; - }) - ); - } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRateTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRateTests.java index e12697e418056..7f651864cd6ee 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRateTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRateTests.java @@ -8,17 +8,11 @@ package org.elasticsearch.xpack.analytics.rate; import org.elasticsearch.common.Rounding; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -141,22 +135,6 @@ protected BuilderAndToReduce randomResultsToReduce(St return new BuilderAndToReduce<>(mock(RateAggregationBuilder.class), internalRates); } - @Override - protected void assertFromXContent(InternalResetTrackingRate aggregation, ParsedAggregation parsedAggregation) throws IOException { - - } - - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new 
NamedXContentRegistry.Entry(Aggregation.class, new ParseField(InternalResetTrackingRate.NAME), (p, c) -> { - assumeTrue("There is no ParsedRate yet", false); - return null; - }) - ); - } - public void testIncludes() { InternalResetTrackingRate big = new InternalResetTrackingRate( "n", diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java index 132f3b30b2f3a..3abd42d6f2b57 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java @@ -7,30 +7,21 @@ package org.elasticsearch.xpack.analytics.stringstats; -import org.elasticsearch.client.analytics.ParsedStringStats; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; -import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import java.util.stream.Stream; import static java.util.Collections.emptyMap; import static java.util.stream.Collectors.toList; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; public class InternalStringStatsTests extends 
InternalAggregationTestCase { @@ -40,18 +31,6 @@ protected SearchPlugin registerPlugin() { return new AnalyticsPlugin(); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(StringStatsAggregationBuilder.NAME), - (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c) - ) - ); - } - @Override protected InternalStringStats createTestInstance(String name, Map metadata) { return createTestInstance(name, metadata, Long.MAX_VALUE, Long.MAX_VALUE); @@ -123,36 +102,6 @@ protected InternalStringStats mutateInstance(InternalStringStats instance) { ); } - @Override - protected void assertFromXContent(InternalStringStats aggregation, ParsedAggregation parsedAggregation) throws IOException { - ParsedStringStats parsed = (ParsedStringStats) parsedAggregation; - assertThat(parsed.getName(), equalTo(aggregation.getName())); - if (aggregation.getCount() == 0) { - assertThat(parsed.getCount(), equalTo(0L)); - assertThat(parsed.getMinLength(), equalTo(0)); - assertThat(parsed.getMaxLength(), equalTo(0)); - assertThat(parsed.getAvgLength(), equalTo(0d)); - assertThat(parsed.getEntropy(), equalTo(0d)); - assertThat(parsed.getDistribution(), nullValue()); - return; - } - assertThat(parsed.getCount(), equalTo(aggregation.getCount())); - assertThat(parsed.getMinLength(), equalTo(aggregation.getMinLength())); - assertThat(parsed.getMaxLength(), equalTo(aggregation.getMaxLength())); - assertThat(parsed.getAvgLength(), equalTo(aggregation.getAvgLength())); - assertThat(parsed.getEntropy(), equalTo(aggregation.getEntropy())); - if (aggregation.getShowDistribution()) { - assertThat(parsed.getDistribution(), equalTo(aggregation.getDistribution())); - } else { - assertThat(parsed.getDistribution(), nullValue()); - } - } - - @Override - protected Predicate excludePathsFromXContentInsertion() { - return path -> path.endsWith(".distribution"); - } - @Override 
protected void assertReduced(InternalStringStats reduced, List inputs) { assertThat(reduced.getCount(), equalTo(inputs.stream().mapToLong(InternalStringStats::getCount).sum())); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java index a4aa3da51c294..48566d178a6e6 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java @@ -8,24 +8,18 @@ package org.elasticsearch.xpack.analytics.topmetrics; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.client.analytics.ParsedTopMetrics; import org.elasticsearch.common.Strings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateUtils; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortValue; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; import org.elasticsearch.xpack.analytics.topmetrics.InternalTopMetrics.MetricValue; @@ -39,16 +33,12 @@ import java.util.Map; import java.util.Set; import 
java.util.function.Function; -import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.IntStream; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static java.util.stream.Collectors.toList; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notANumber; import static org.mockito.Mockito.mock; @@ -282,18 +272,6 @@ private InternalTopMetrics resultWithAllTypes() { ); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(TopMetricsAggregationBuilder.NAME), - (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c) - ) - ); - } - @Override protected InternalTopMetrics createTestInstance(String name, Map metadata) { return createTestInstance( @@ -358,48 +336,6 @@ protected InternalTopMetrics mutateInstance(InternalTopMetrics instance) { return new InternalTopMetrics(name, instanceSortOrder, metricNames, size, topMetrics, instance.getMetadata()); } - /** - * An extra test for parsing dates from xcontent because we can't random - * into {@link DocValueFormat.DateTime} because it doesn't - * implement {@link Object#equals(Object)}. 
- */ - public void testFromXContentDates() throws IOException { - InternalTopMetrics aggregation = createTestInstance( - randomAlphaOfLength(3), - emptyMap(), - InternalTopMetricsTests::strictDateTime, - InternalTopMetricsTests::randomSortValue - ); - ParsedAggregation parsedAggregation = parseAndAssert(aggregation, randomBoolean(), randomBoolean()); - assertFromXContent(aggregation, parsedAggregation); - } - - @Override - protected void assertFromXContent(InternalTopMetrics aggregation, ParsedAggregation parsedAggregation) throws IOException { - ParsedTopMetrics parsed = (ParsedTopMetrics) parsedAggregation; - assertThat(parsed.getName(), equalTo(aggregation.getName())); - assertThat(parsed.getTopMetrics(), hasSize(aggregation.getTopMetrics().size())); - for (int i = 0; i < parsed.getTopMetrics().size(); i++) { - ParsedTopMetrics.TopMetrics parsedTop = parsed.getTopMetrics().get(i); - InternalTopMetrics.TopMetric internalTop = aggregation.getTopMetrics().get(i); - Object expectedSort = internalTop.getSortFormat() == DocValueFormat.RAW - ? 
internalTop.getSortValue().getKey() - : internalTop.getSortValue().format(internalTop.getSortFormat()); - assertThat(parsedTop.getSort(), equalTo(singletonList(expectedSort))); - assertThat(parsedTop.getMetrics().keySet(), hasSize(aggregation.getMetricNames().size())); - for (int m = 0; m < aggregation.getMetricNames().size(); m++) { - String name = aggregation.getMetricNames().get(m); - InternalTopMetrics.MetricValue value = internalTop.getMetricValues().get(m); - assertThat(parsedTop.getMetrics(), hasKey(name)); - if (value.getFormat() == DocValueFormat.RAW) { - assertThat(parsedTop.getMetrics().get(name), equalTo(value.numberValue())); - } else { - assertThat(parsedTop.getMetrics().get(name), equalTo(value.getValue().format(value.getFormat()))); - } - } - } - } - @Override protected BuilderAndToReduce randomResultsToReduce(String name, int size) { InternalTopMetrics prototype = createTestInstance(); @@ -499,9 +435,4 @@ private static SortValue randomSortValue(DocValueFormat docValueFormat) { } return randomSortValue(); } - - @Override - protected Predicate excludePathsFromXContentInsertion() { - return path -> path.endsWith(".metrics"); - } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java index 7a8bb0cc6471f..106723cdb343a 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java @@ -10,17 +10,12 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.plugins.SearchPlugin; 
import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; import java.io.IOException; @@ -88,11 +83,6 @@ protected boolean supportsSampling() { return true; } - @Override - protected void assertFromXContent(InternalTTest min, ParsedAggregation parsedAggregation) { - // There is no ParsedTTest yet so we cannot test it here - } - @Override protected InternalTTest mutateInstance(InternalTTest instance) { String name = instance.getName(); @@ -122,15 +112,4 @@ protected InternalTTest mutateInstance(InternalTTest instance) { } return new InternalTTest(name, state, formatter, metadata); } - - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(TTestAggregationBuilder.NAME), (p, c) -> { - assumeTrue("There is no ParsedTTest yet", false); - return null; - }) - ); - } } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index f528d99133756..6ec287fe2b980 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.apmdata; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import 
org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; @@ -19,7 +21,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.yaml.YamlXContent; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; import org.elasticsearch.xpack.core.template.IngestPipelineConfig; @@ -37,12 +38,14 @@ * Creates all index templates and ingest pipelines that are required for using Elastic APM. */ public class APMIndexTemplateRegistry extends IndexTemplateRegistry { + private static final Logger logger = LogManager.getLogger(APMIndexTemplateRegistry.class); + private final int version; private final Map componentTemplates; private final Map composableIndexTemplates; private final List ingestPipelines; - private final boolean enabled; + private volatile boolean enabled; @SuppressWarnings("unchecked") public APMIndexTemplateRegistry( @@ -75,8 +78,6 @@ public APMIndexTemplateRegistry( Map.Entry> pipelineConfig = map.entrySet().iterator().next(); return loadIngestPipeline(pipelineConfig.getKey(), version, (List) pipelineConfig.getValue().get("dependencies")); }).collect(Collectors.toList()); - - enabled = XPackSettings.APM_DATA_ENABLED.get(nodeSettings); } catch (IOException e) { throw new RuntimeException(e); } @@ -86,6 +87,11 @@ public int getVersion() { return version; } + void setEnabled(boolean enabled) { + logger.info("APM index template registry is {}", enabled ? 
"enabled" : "disabled"); + this.enabled = enabled; + } + public boolean isEnabled() { return enabled; } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java index 7acf3a3c972da..f905c17c04b4c 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java @@ -10,36 +10,62 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.core.XPackSettings; import java.util.Collection; +import java.util.Collections; import java.util.List; public class APMPlugin extends Plugin implements ActionPlugin { private static final Logger logger = LogManager.getLogger(APMPlugin.class); - private final SetOnce registry = new SetOnce<>(); + final SetOnce registry = new SetOnce<>(); + + private final boolean enabled; + + // APM_DATA_REGISTRY_ENABLED controls enabling the index template registry. + // + // This setting will be ignored if the plugin is disabled. + static final Setting APM_DATA_REGISTRY_ENABLED = Setting.boolSetting( + "xpack.apm_data.registry.enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public APMPlugin(Settings settings) { + this.enabled = XPackSettings.APM_DATA_ENABLED.get(settings); + } @Override public Collection createComponents(PluginServices services) { + logger.info("APM ingest plugin is {}", enabled ? 
"enabled" : "disabled"); + Settings settings = services.environment().settings(); + ClusterService clusterService = services.clusterService(); registry.set( - new APMIndexTemplateRegistry( - services.environment().settings(), - services.clusterService(), - services.threadPool(), - services.client(), - services.xContentRegistry() - ) + new APMIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); - APMIndexTemplateRegistry registryInstance = registry.get(); - logger.info("APM ingest plugin is {}", registryInstance.isEnabled() ? "enabled" : "disabled"); - registryInstance.initialize(); - return List.of(registryInstance); + if (enabled) { + APMIndexTemplateRegistry registryInstance = registry.get(); + registryInstance.setEnabled(APM_DATA_REGISTRY_ENABLED.get(settings)); + clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_DATA_REGISTRY_ENABLED, registryInstance::setEnabled); + registryInstance.initialize(); + } + return Collections.emptyList(); } @Override public void close() { registry.get().close(); } + + @Override + public List> getSettings() { + return List.of(APM_DATA_REGISTRY_ENABLED); + } } diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml index 0ebbb99a1e379..3d9c1490e5a86 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml @@ -11,12 +11,12 @@ composed_of: - apm@mappings - apm@settings - apm-10d@lifecycle -- apm@custom +- logs@custom - logs-apm.app@custom - ecs@mappings ignore_missing_component_templates: +- logs@custom - logs-apm.app@custom -- apm@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml index 831f7cc404415..4adcf125b2df9 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml @@ -13,12 +13,12 @@ composed_of: - apm@settings - apm-10d@lifecycle - logs-apm.error@mappings -- apm@custom +- logs@custom - logs-apm.error@custom - ecs@mappings ignore_missing_component_templates: +- logs@custom - logs-apm.error@custom -- apm@custom template: mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml index bdd1fa363bcf4..c2233469110f8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml @@ -13,11 +13,11 @@ composed_of: - apm-90d@lifecycle - metrics-apm@mappings - metrics-apm@settings -- apm@custom +- metrics@custom - metrics-apm.app@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom +- metrics@custom - metrics-apm.app@custom template: settings: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml index 205784e22e685..3d6d05c58e780 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml @@ -14,11 +14,11 @@ composed_of: - apm-90d@lifecycle - metrics-apm@mappings - metrics-apm@settings -- apm@custom +- metrics@custom - metrics-apm.internal@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom +- metrics@custom - 
metrics-apm.internal@custom template: settings: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml index 6279e044fbfcf..f234b60b1a6ec 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_destination@mappings -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml index 10e4ca5b39a52..aa4f212532e56 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_destination@mappings -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.1m@custom template: settings: index: diff --git 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml index dbac0d0d17d89..9b1a26486f482 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_destination@mappings -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml index af99e419d4a56..c37ec93651d9d 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_summary@mappings -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml index 29c28953d6b40..3a99bc8472c66 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_summary@mappings -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.1m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml index bdbd4900df3bb..d829967f7eddf 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_summary@mappings -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml index 8b4e88391a475..bc21b35d4777f 100644 --- 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_transaction@mappings -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml index 811067f8e6f30..87a1e254baea7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_transaction@mappings -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.1m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml index db28b7c56aaab..b45ce0ec0fad7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml +++ 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_transaction@mappings -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml index 548f73656fda4..51d3c90cb4af8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.transaction@mappings -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml index 6206e7c126c48..8825a93db28dc 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.transaction@mappings 
-- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.1m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml index 4ad00aecf23a5..e6657fbfe5d28 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.transaction@mappings -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml index 174faf432eb6e..174aec8c5515a 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml @@ -14,14 +14,12 @@ composed_of: - apm-90d@lifecycle - traces-apm@mappings - traces-apm.rum@mappings -- apm@custom -- traces-apm@custom +- traces@custom - traces-apm.rum@custom - ecs@mappings ignore_missing_component_templates: +- traces@custom - traces-apm.rum@custom -- traces-apm@custom -- apm@custom template: settings: index: diff --git 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml index 8c65c69bc3afa..a39d10897a2ed 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml @@ -11,10 +11,12 @@ composed_of: - traces@mappings - apm@mappings - apm@settings -- apm@custom +- traces@custom +- traces-apm.sampled@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom +- traces@custom +- traces-apm.sampled@custom template: lifecycle: data_retention: 1h diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml index fb6670a7f7143..de9c47dfd3f1b 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml @@ -13,12 +13,12 @@ composed_of: - apm@settings - apm-10d@lifecycle - traces-apm@mappings -- apm@custom +- traces@custom - traces-apm@custom - ecs@mappings ignore_missing_component_templates: +- traces@custom - traces-apm@custom -- apm@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index 7dcd6fdd807e4..4f6a5b58ff38d 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; 
import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; @@ -55,12 +56,15 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import java.util.stream.Stream; -import static org.elasticsearch.xpack.core.XPackSettings.APM_DATA_ENABLED; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -75,20 +79,28 @@ public class APMIndexTemplateRegistryTests extends ESTestCase { @Before public void createRegistryAndClient() { + final ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Stream.concat(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), Set.of(APMPlugin.APM_DATA_REGISTRY_ENABLED).stream()) + .collect(Collectors.toSet()) + ); + threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); - clusterService = ClusterServiceUtils.createClusterService(threadPool); + clusterService = ClusterServiceUtils.createClusterService(threadPool, clusterSettings); FeatureService featureService = new FeatureService(List.of()); stackTemplateRegistryAccessor = new StackTemplateRegistryAccessor( new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY, featureService) ); + apmIndexTemplateRegistry = new APMIndexTemplateRegistry( - Settings.builder().put(APM_DATA_ENABLED.getKey(), true).build(), + 
Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY ); + apmIndexTemplateRegistry.setEnabled(true); } @After @@ -111,6 +123,28 @@ public void testThatMissingMasterNodeDoesNothing() { apmIndexTemplateRegistry.clusterChanged(event); } + public void testThatDisablingRegistryDoesNothing() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + apmIndexTemplateRegistry.setEnabled(false); + assertThat(apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet(), hasSize(0)); + assertThat(apmIndexTemplateRegistry.getComposableTemplateConfigs().entrySet(), hasSize(0)); + assertThat(apmIndexTemplateRegistry.getIngestPipelines(), hasSize(0)); + + client.setVerifier((a, r, l) -> { + fail("if the registry is disabled nothing should happen"); + return null; + }); + ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), nodes); + apmIndexTemplateRegistry.clusterChanged(event); + + apmIndexTemplateRegistry.setEnabled(true); + assertThat(apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet(), not(hasSize(0))); + assertThat(apmIndexTemplateRegistry.getComposableTemplateConfigs().entrySet(), not(hasSize(0))); + assertThat(apmIndexTemplateRegistry.getIngestPipelines(), not(hasSize(0))); + } + public void testThatIndependentTemplatesAreAddedImmediatelyIfMissing() throws Exception { DiscoveryNode node = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); @@ -249,6 +283,48 @@ public void testIndexTemplates() throws Exception { assertThat(actualInstalledIngestPipelines.get(), equalTo(0)); } + public void testIndexTemplateConventions() throws Exception { + for (Map.Entry entry : apmIndexTemplateRegistry.getComposableTemplateConfigs().entrySet()) { + final String name = entry.getKey(); + final int atIndex 
= name.lastIndexOf('@'); + assertThat(atIndex, not(equalTo(-1))); + assertThat(name.substring(atIndex + 1), equalTo("template")); + + final String dataStreamType = name.substring(0, name.indexOf('-')); + assertThat(dataStreamType, isIn(List.of("logs", "metrics", "traces"))); + + final ComposableIndexTemplate template = entry.getValue(); + assertThat(template.indexPatterns().size(), equalTo(1)); + + final String namePrefix = name.substring(0, atIndex); + switch (namePrefix) { + case "logs-apm.app", "metrics-apm.app": + // These two data streams have a service-specific dataset. + assertThat(template.indexPatterns().get(0), equalTo(namePrefix + ".*-*")); + break; + default: + assertThat(template.indexPatterns().get(0), equalTo(namePrefix + "-*")); + break; + } + + // Each index template should be composed of the following optional component templates: + // @custom + // -@custom + final List optionalComponentTemplates = template.composedOf() + .stream() + .filter(t -> template.getIgnoreMissingComponentTemplates().contains(t)) + .toList(); + assertThat(optionalComponentTemplates, containsInAnyOrder(namePrefix + "@custom", dataStreamType + "@custom")); + + // There should be no required custom component templates. 
+ final List requiredCustomComponentTemplates = template.getRequiredComponentTemplates() + .stream() + .filter(t -> t.endsWith("@custom")) + .toList(); + assertThat(requiredCustomComponentTemplates, empty()); + } + } + private Map getIndependentComponentTemplateConfigs() { return apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet().stream().filter(template -> { Settings settings = template.getValue().template().settings(); diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMPluginTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMPluginTests.java new file mode 100644 index 0000000000000..289852737393e --- /dev/null +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMPluginTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.apmdata; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.XPackSettings; +import org.junit.After; +import org.junit.Before; + +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class APMPluginTests extends ESTestCase { + private APMPlugin apmPlugin; + private ClusterService clusterService; + private ThreadPool threadPool; + + @Before + public void createPlugin() { + final ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Stream.concat(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), Set.of(APMPlugin.APM_DATA_REGISTRY_ENABLED).stream()) + .collect(Collectors.toSet()) + ); + threadPool = new TestThreadPool(this.getClass().getName()); + clusterService = ClusterServiceUtils.createClusterService(threadPool, clusterSettings); + apmPlugin = new APMPlugin(Settings.builder().put(XPackSettings.APM_DATA_ENABLED.getKey(), true).build()); + } + + private void createComponents() { + Environment mockEnvironment = mock(Environment.class); + when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); + Plugin.PluginServices services = mock(Plugin.PluginServices.class); + when(services.clusterService()).thenReturn(clusterService); + when(services.threadPool()).thenReturn(threadPool); + when(services.environment()).thenReturn(mockEnvironment); + apmPlugin.createComponents(services); + } + + @After + @Override + public void tearDown() 
throws Exception { + super.tearDown(); + apmPlugin.close(); + threadPool.shutdownNow(); + } + + public void testRegistryEnabledSetting() throws Exception { + createComponents(); + + // By default, the registry is enabled. + assertTrue(apmPlugin.registry.get().isEnabled()); + + // The registry can be disabled/enabled dynamically. + clusterService.getClusterSettings() + .applySettings(Settings.builder().put(APMPlugin.APM_DATA_REGISTRY_ENABLED.getKey(), false).build()); + assertFalse(apmPlugin.registry.get().isEnabled()); + } + + public void testDisablingPluginDisablesRegistry() throws Exception { + apmPlugin = new APMPlugin(Settings.builder().put(XPackSettings.APM_DATA_ENABLED.getKey(), false).build()); + createComponents(); + + // The plugin is disabled, so the registry is disabled too. + assertFalse(apmPlugin.registry.get().isEnabled()); + + // The registry can not be enabled dynamically when the plugin is disabled. + clusterService.getClusterSettings() + .applySettings(Settings.builder().put(APMPlugin.APM_DATA_REGISTRY_ENABLED.getKey(), true).build()); + assertFalse(apmPlugin.registry.get().isEnabled()); + } +} diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_templates.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_templates.yml new file mode 100644 index 0000000000000..62b36926d01dc --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_templates.yml @@ -0,0 +1,76 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + + - do: + cluster.put_component_template: + name: "metrics@custom" + body: + template: + mappings: + properties: + custom_field1: + type: keyword + meta: + source: metrics@custom + custom_field2: + type: keyword + meta: + source: metrics@custom + + - do: + cluster.put_component_template: + name: "metrics-apm.app@custom" + body: + template: + mappings: + properties: + custom_field2: + type: keyword + meta: + 
source: metrics-apm.app@custom + custom_field3: + type: keyword + meta: + source: metrics-apm.app@custom + +--- +"Test metrics @custom component templates": + - do: + indices.create_data_stream: + name: metrics-apm.app.svc1-testing + - do: + # Wait for cluster state changes to be applied before + # querying field mappings. + cluster.health: + wait_for_events: languid + - do: + indices.get_field_mapping: + index: metrics-apm.app.svc1-testing + fields: custom_field* + - set: {_arbitrary_key_: index} + - match: + $body.$index.mappings: + custom_field1: + full_name: custom_field1 + mapping: + custom_field1: + type: keyword + meta: + source: metrics@custom + custom_field2: + full_name: custom_field2 + mapping: + custom_field2: + type: keyword + meta: + source: metrics-apm.app@custom + custom_field3: + full_name: custom_field3 + mapping: + custom_field3: + type: keyword + meta: + source: metrics-apm.app@custom diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index 1819ad7960006..88ae09fbcdc99 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -192,7 +192,7 @@ private SearchHit[] getSearchHits(String asyncId, String user) throws IOExceptio ) ).getSearchResponse(); try { - return searchResponse.getHits().getHits(); + return searchResponse.getHits().asUnpooled().getHits(); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java index 
bb3dc5b866b54..3605d6365f867 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; @@ -1318,7 +1317,7 @@ public void testCancelViaTasksAPI() throws Exception { SearchListenerPlugin.waitSearchStarted(); - ActionFuture cancelFuture; + ActionFuture cancelFuture; try { ListTasksResponse listTasksResponse = client(LOCAL_CLUSTER).admin() .cluster() diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java index e15f9781a069f..195d00169840a 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -44,6 +45,7 @@ public final class AsyncSearch extends Plugin implements ActionPlugin { @Override public List 
getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -52,7 +54,7 @@ public List getRestHandlers( Supplier nodesInCluster ) { return Arrays.asList( - new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder()), + new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry), new RestGetAsyncSearchAction(), new RestGetAsyncStatusAction(), new RestDeleteAsyncSearchAction() diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java index 481f5c79ba2ed..04b0b11ad38d4 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java @@ -13,7 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.search.CCSSingleCoordinatorSearchProgressListener; import org.elasticsearch.action.search.SearchProgressActionListener; import org.elasticsearch.action.search.SearchRequest; @@ -155,7 +155,7 @@ public void cancelTask(Runnable runnable, String reason) { CancelTasksRequest req = new CancelTasksRequest().setTargetTaskId(searchId.getTaskId()).setReason(reason); client.admin().cluster().cancelTasks(req, new ActionListener<>() { @Override - public void onResponse(CancelTasksResponse cancelTasksResponse) { + public void onResponse(ListTasksResponse cancelTasksResponse) { runnable.run(); } @@ -499,6 +499,19 
@@ public void onFinalReduce(List shards, TotalHits totalHits, Interna searchResponse.get().updatePartialResponse(shards.size(), totalHits, () -> aggregations, reducePhase); } + /** + * Indicates that a cluster has finished a search operation. Used for CCS minimize_roundtrips=true only. + * + * @param clusterAlias alias of cluster that has finished a search operation and returned a SearchResponse. + * The cluster alias for the local cluster is RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY. + * @param clusterResponse SearchResponse from cluster 'clusterAlias' + */ + @Override + public void onClusterResponseMinimizeRoundtrips(String clusterAlias, SearchResponse clusterResponse) { + // no need to call the delegate progress listener, since this method is only called for minimize_roundtrips=true + searchResponse.get().updateResponseMinimizeRoundtrips(clusterAlias, clusterResponse); + } + @Override public void onResponse(SearchResponse response) { searchResponse.get().updateFinalResponse(response, ccsMinimizeRoundtrips); diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java index e50a4ce1ed94f..45f393a9a845e 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java @@ -11,6 +11,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse.Clusters; +import org.elasticsearch.action.search.SearchResponseMerger; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.AtomicArray; @@ -19,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.SearchHits; 
import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.search.action.AsyncSearchResponse; import org.elasticsearch.xpack.core.search.action.AsyncStatusResponse; @@ -61,7 +63,20 @@ class MutableSearchResponse implements Releasable { private SearchResponse finalResponse; private ElasticsearchException failure; private Map> responseHeaders; - + /** + * Set to true when the local cluster has completed (its full SearchResponse + * has been received. Only used for CCS minimize_roundtrips=true. + */ + private boolean localClusterComplete; + /** + * For CCS minimize_roundtrips=true, we collect SearchResponses from each cluster in + * order to provide partial results before all clusters have reported back results. + */ + private List clusterResponses; + /** + * Set to true when the final SearchResponse has been received + * or a fatal error has occurred. + */ private boolean frozen; /** @@ -81,11 +96,16 @@ class MutableSearchResponse implements Releasable { this.isPartial = true; this.threadContext = threadContext; this.totalHits = EMPTY_TOTAL_HITS; + this.localClusterComplete = false; } /** * Updates the response with the result of a partial reduction. + * + * @param successfulShards + * @param totalHits * @param reducedAggs is a strategy for producing the reduced aggs + * @param reducePhase */ @SuppressWarnings("HiddenField") synchronized void updatePartialResponse( @@ -128,6 +148,24 @@ assert shardsInResponseMatchExpected(response, ccsMinimizeRoundtrips) this.frozen = true; } + /** + * Indicates that a cluster has finished a search operation. Used for CCS minimize_roundtrips=true only. + * + * @param clusterAlias alias of cluster that has finished a search operation and returned a SearchResponse. + * The cluster alias for the local cluster is RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY. 
+ * @param clusterResponse SearchResponse from cluster 'clusterAlias' + */ + synchronized void updateResponseMinimizeRoundtrips(String clusterAlias, SearchResponse clusterResponse) { + if (clusterResponses == null) { + clusterResponses = new ArrayList<>(); + } + clusterResponses.add(clusterResponse); + clusterResponse.mustIncRef(); + if (RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY.equals(clusterAlias)) { + localClusterComplete = true; + } + } + private boolean isPartialResponse(SearchResponse response) { if (response.getClusters() == null) { return true; @@ -190,6 +228,7 @@ synchronized AsyncSearchResponse toAsyncSearchResponse(AsyncSearchTask task, lon if (restoreResponseHeaders && responseHeaders != null) { restoreResponseHeadersContext(threadContext, responseHeaders); } + SearchResponse searchResponse; if (finalResponse != null) { // We have a final response, use it. @@ -199,16 +238,43 @@ synchronized AsyncSearchResponse toAsyncSearchResponse(AsyncSearchTask task, lon // An error occurred before we got the shard list searchResponse = null; } else { - /* - * Build the response, reducing aggs if we haven't already and - * storing the result of the reduction, so we won't have to reduce - * the same aggregation results a second time if nothing has changed. - * This does cost memory because we have a reference to the finally - * reduced aggs sitting around which can't be GCed until we get an update. 
- */ - InternalAggregations reducedAggs = reducedAggsSource.get(); - reducedAggsSource = () -> reducedAggs; - searchResponse = buildResponse(task.getStartTimeNanos(), reducedAggs); + // partial results branch + SearchResponseMerger searchResponseMerger = createSearchResponseMerger(task); + try { + if (searchResponseMerger == null) { // local-only search or CCS MRT=false + /* + * Build the response, reducing aggs if we haven't already and + * storing the result of the reduction, so we won't have to reduce + * the same aggregation results a second time if nothing has changed. + * This does cost memory because we have a reference to the finally + * reduced aggs sitting around which can't be GCed until we get an update. + */ + InternalAggregations reducedAggs = reducedAggsSource.get(); + reducedAggsSource = () -> reducedAggs; + searchResponse = buildResponse(task.getStartTimeNanos(), reducedAggs); + } else if (localClusterComplete == false) { + /* + * For CCS MRT=true and the local cluster has reported back only partial results + * (subset of shards), so use SearchResponseMerger to do a merge of any full results that + * have come in from remote clusters and the partial results of the local cluster + */ + InternalAggregations reducedAggs = reducedAggsSource.get(); + reducedAggsSource = () -> reducedAggs; + SearchResponse partialAggsSearchResponse = buildResponse(task.getStartTimeNanos(), reducedAggs); + try { + searchResponse = getMergedResponse(searchResponseMerger, partialAggsSearchResponse); + } finally { + partialAggsSearchResponse.decRef(); + } + } else { + // For CCS MRT=true when the local cluster has reported back full results (via updateResponseMinimizeRoundtrips) + searchResponse = getMergedResponse(searchResponseMerger); + } + } finally { + if (searchResponseMerger != null) { + searchResponseMerger.close(); + } + } } try { return new AsyncSearchResponse( @@ -227,6 +293,41 @@ synchronized AsyncSearchResponse toAsyncSearchResponse(AsyncSearchTask task, lon } } 
+ /** + * Creates a SearchResponseMerger from the Supplier of {@link SearchResponseMerger} held by the AsyncSearchTask. + * The supplier will be null for local-only searches and CCS minimize_roundtrips=true. In those cases, + * this method returns null. + * + * Otherwise, it creates a new SearchResponseMerger and populates it with all the SearchResponses + * received so far (via the updateResponseMinimizeRoundtrips method). + * + * @param task holds the Supplier of SearchResponseMerger + * @return SearchResponseMerger with all responses collected to so far or null + * (for local-only/CCS minimize_roundtrips=false) + */ + private SearchResponseMerger createSearchResponseMerger(AsyncSearchTask task) { + if (task.getSearchResponseMergerSupplier() == null) { + return null; // local search and CCS minimize_roundtrips=false + } + return task.getSearchResponseMergerSupplier().get(); + } + + private SearchResponse getMergedResponse(SearchResponseMerger merger) { + return getMergedResponse(merger, null); + } + + private SearchResponse getMergedResponse(SearchResponseMerger merger, SearchResponse localPartialAggsOnly) { + if (clusterResponses != null) { + for (SearchResponse response : clusterResponses) { + merger.add(response); + } + } + if (localPartialAggsOnly != null) { + merger.add(localPartialAggsOnly); + } + return merger.getMergedResponse(clusters); + } + /** * Creates an {@link AsyncStatusResponse} -- status of an async response. * Response is created based on the current state of the mutable response or based on {@code finalResponse} if it is available. 
@@ -297,15 +398,20 @@ synchronized AsyncSearchResponse toAsyncSearchResponse( if (this.failure != null) { reduceException.addSuppressed(this.failure); } - return new AsyncSearchResponse( - task.getExecutionId().getEncoded(), - buildResponse(task.getStartTimeNanos(), null), - reduceException, - isPartial, - frozen == false, - task.getStartTime(), - expirationTime - ); + var response = buildResponse(task.getStartTimeNanos(), null); + try { + return new AsyncSearchResponse( + task.getExecutionId().getEncoded(), + response, + reduceException, + isPartial, + frozen == false, + task.getStartTime(), + expirationTime + ); + } finally { + response.decRef(); + } } private void failIfFrozen() { @@ -373,9 +479,14 @@ private String getShardsInResponseMismatchInfo(SearchResponse response, boolean } @Override - public void close() { + public synchronized void close() { if (finalResponse != null) { finalResponse.decRef(); } + if (clusterResponses != null) { + for (SearchResponse clusterResponse : clusterResponses) { + clusterResponse.decRef(); + } + } } } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java index 8f554d4d8705c..f88207343bd60 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.search; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; @@ -34,9 +35,11 @@ public final class RestSubmitAsyncSearchAction extends BaseRestHandler { static final Set RESPONSE_PARAMS = 
Collections.singleton(TYPED_KEYS_PARAM); private final SearchUsageHolder searchUsageHolder; + private final NamedWriteableRegistry namedWriteableRegistry; - public RestSubmitAsyncSearchAction(SearchUsageHolder searchUsageHolder) { + public RestSubmitAsyncSearchAction(SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) { this.searchUsageHolder = searchUsageHolder; + this.namedWriteableRegistry = namedWriteableRegistry; } @Override @@ -58,14 +61,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli // them as supported. We rely on SubmitAsyncSearchRequest#validate to fail in case they are set. // Note that ccs_minimize_roundtrips is also set this way, which is a supported option. request.withContentOrSourceParamParserOrNull( - parser -> parseSearchRequest( - submit.getSearchRequest(), - request, - parser, - client.getNamedWriteableRegistry(), - setSize, - searchUsageHolder - ) + parser -> parseSearchRequest(submit.getSearchRequest(), request, parser, namedWriteableRegistry, setSize, searchUsageHolder) ); if (request.hasParam("wait_for_completion_timeout")) { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java index f119e590cc75c..6083436bd09d3 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java @@ -97,304 +97,330 @@ public void testTaskDescription() { SearchRequest searchRequest = new SearchRequest("index1", "index2").source( new SearchSourceBuilder().query(QueryBuilders.termQuery("field", "value")) ); - AsyncSearchTask asyncSearchTask = new AsyncSearchTask( - 0L, - "", - "", - new TaskId("node1", 0), - searchRequest::buildDescription, - TimeValue.timeValueHours(1), - 
Collections.emptyMap(), - Collections.emptyMap(), - new AsyncExecutionId("0", new TaskId("node1", 1)), - new NoOpClient(threadPool), - threadPool, - (t) -> () -> null - ); - assertEquals(""" - async_search{indices[index1,index2], search_type[QUERY_THEN_FETCH], source\ - [{"query":{"term":{"field":{"value":"value"}}}}]}""", asyncSearchTask.getDescription()); + try ( + AsyncSearchTask asyncSearchTask = new AsyncSearchTask( + 0L, + "", + "", + new TaskId("node1", 0), + searchRequest::buildDescription, + TimeValue.timeValueHours(1), + Collections.emptyMap(), + Collections.emptyMap(), + new AsyncExecutionId("0", new TaskId("node1", 1)), + new NoOpClient(threadPool), + threadPool, + (t) -> () -> null + ) + ) { + assertEquals(""" + async_search{indices[index1,index2], search_type[QUERY_THEN_FETCH], source\ + [{"query":{"term":{"field":{"value":"value"}}}}]}""", asyncSearchTask.getDescription()); + } } public void testWaitForInit() throws InterruptedException { - AsyncSearchTask task = new AsyncSearchTask( - 0L, - "", - "", - new TaskId("node1", 0), - () -> null, - TimeValue.timeValueHours(1), - Collections.emptyMap(), - Collections.emptyMap(), - new AsyncExecutionId("0", new TaskId("node1", 1)), - new NoOpClient(threadPool), - threadPool, - (t) -> () -> null - ); - int numShards = randomIntBetween(0, 10); - List shards = new ArrayList<>(); - for (int i = 0; i < numShards; i++) { - shards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - List skippedShards = new ArrayList<>(); - int numSkippedShards = randomIntBetween(0, 10); - for (int i = 0; i < numSkippedShards; i++) { - skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } + try ( + AsyncSearchTask task = new AsyncSearchTask( + 0L, + "", + "", + new TaskId("node1", 0), + () -> null, + TimeValue.timeValueHours(1), + Collections.emptyMap(), + Collections.emptyMap(), + new AsyncExecutionId("0", new TaskId("node1", 1)), + new NoOpClient(threadPool), + threadPool, + (t) -> () -> null + ) + ) { + 
int numShards = randomIntBetween(0, 10); + List shards = new ArrayList<>(); + for (int i = 0; i < numShards; i++) { + shards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + List skippedShards = new ArrayList<>(); + int numSkippedShards = randomIntBetween(0, 10); + for (int i = 0; i < numSkippedShards; i++) { + skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } - int numThreads = randomIntBetween(1, 10); - CountDownLatch latch = new CountDownLatch(numThreads); - for (int i = 0; i < numThreads; i++) { - Thread thread = new Thread(() -> task.addCompletionListener(ActionTestUtils.assertNoFailureListener(resp -> { - assertThat(numShards + numSkippedShards, equalTo(resp.getSearchResponse().getTotalShards())); - assertThat(numSkippedShards, equalTo(resp.getSearchResponse().getSkippedShards())); - assertThat(0, equalTo(resp.getSearchResponse().getFailedShards())); - latch.countDown(); - }), TimeValue.timeValueMillis(1))); - thread.start(); + int numThreads = randomIntBetween(1, 10); + CountDownLatch latch = new CountDownLatch(numThreads); + for (int i = 0; i < numThreads; i++) { + Thread thread = new Thread(() -> task.addCompletionListener(ActionTestUtils.assertNoFailureListener(resp -> { + assertThat(numShards + numSkippedShards, equalTo(resp.getSearchResponse().getTotalShards())); + assertThat(numSkippedShards, equalTo(resp.getSearchResponse().getSkippedShards())); + assertThat(0, equalTo(resp.getSearchResponse().getFailedShards())); + latch.countDown(); + }), TimeValue.timeValueMillis(1))); + thread.start(); + } + assertFalse(latch.await(numThreads * 2, TimeUnit.MILLISECONDS)); + task.getSearchProgressActionListener() + .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + latch.await(); } - assertFalse(latch.await(numThreads * 2, TimeUnit.MILLISECONDS)); - task.getSearchProgressActionListener() - .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, 
createTimeProvider()); - latch.await(); } public void testWithFailure() throws InterruptedException { - AsyncSearchTask task = createAsyncSearchTask(); - int numThreads = randomIntBetween(1, 10); - CountDownLatch latch = new CountDownLatch(numThreads); - for (int i = 0; i < numThreads; i++) { - Thread thread = new Thread(() -> task.addCompletionListener(ActionTestUtils.assertNoFailureListener(resp -> { - assertNull(resp.getSearchResponse()); - assertNotNull(resp.getFailure()); - assertTrue(resp.isPartial()); - latch.countDown(); - }), TimeValue.timeValueMillis(1))); - thread.start(); + try (AsyncSearchTask task = createAsyncSearchTask()) { + int numThreads = randomIntBetween(1, 10); + CountDownLatch latch = new CountDownLatch(numThreads); + for (int i = 0; i < numThreads; i++) { + Thread thread = new Thread(() -> task.addCompletionListener(ActionTestUtils.assertNoFailureListener(resp -> { + assertNull(resp.getSearchResponse()); + assertNotNull(resp.getFailure()); + assertTrue(resp.isPartial()); + latch.countDown(); + }), TimeValue.timeValueMillis(1))); + thread.start(); + } + assertFalse(latch.await(numThreads * 2, TimeUnit.MILLISECONDS)); + task.getSearchProgressActionListener().onFailure(new Exception("boom")); + latch.await(); } - assertFalse(latch.await(numThreads * 2, TimeUnit.MILLISECONDS)); - task.getSearchProgressActionListener().onFailure(new Exception("boom")); - latch.await(); } public void testWithFailureAndGetResponseFailureDuringReduction() throws InterruptedException { - AsyncSearchTask task = createAsyncSearchTask(); - task.getSearchProgressActionListener() - .onListShards(Collections.emptyList(), Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, createTimeProvider()); - InternalAggregations aggs = InternalAggregations.from( - Collections.singletonList( - new StringTerms( - "name", - BucketOrder.key(true), - BucketOrder.key(true), - 1, - 1, - Collections.emptyMap(), - DocValueFormat.RAW, - 1, - false, - 1, - Collections.emptyList(), - 
0L - ) - ) - ); - task.getSearchProgressActionListener() - .onPartialReduce(Collections.emptyList(), new TotalHits(0, TotalHits.Relation.EQUAL_TO), aggs, 1); - task.getSearchProgressActionListener().onFailure(new CircuitBreakingException("boom", CircuitBreaker.Durability.TRANSIENT)); AtomicReference response = new AtomicReference<>(); - CountDownLatch latch = new CountDownLatch(1); - task.addCompletionListener(new ActionListener<>() { - @Override - public void onResponse(AsyncSearchResponse asyncSearchResponse) { - assertTrue(response.compareAndSet(null, asyncSearchResponse)); - latch.countDown(); - } + try (AsyncSearchTask task = createAsyncSearchTask()) { + task.getSearchProgressActionListener() + .onListShards(Collections.emptyList(), Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList( + new StringTerms( + "name", + BucketOrder.key(true), + BucketOrder.key(true), + 1, + 1, + Collections.emptyMap(), + DocValueFormat.RAW, + 1, + false, + 1, + Collections.emptyList(), + 0L + ) + ) + ); + task.getSearchProgressActionListener() + .onPartialReduce(Collections.emptyList(), new TotalHits(0, TotalHits.Relation.EQUAL_TO), aggs, 1); + task.getSearchProgressActionListener().onFailure(new CircuitBreakingException("boom", CircuitBreaker.Durability.TRANSIENT)); + CountDownLatch latch = new CountDownLatch(1); + task.addCompletionListener(new ActionListener<>() { + @Override + public void onResponse(AsyncSearchResponse asyncSearchResponse) { + assertTrue(response.compareAndSet(null, asyncSearchResponse)); + asyncSearchResponse.mustIncRef(); + latch.countDown(); + } - @Override - public void onFailure(Exception e) { - throw new AssertionError("onFailure should not be called"); - } - }, TimeValue.timeValueMillis(10L)); - assertTrue(latch.await(1, TimeUnit.SECONDS)); + @Override + public void onFailure(Exception e) { + throw new AssertionError("onFailure should 
not be called"); + } + }, TimeValue.timeValueMillis(10L)); + assertTrue(latch.await(1, TimeUnit.SECONDS)); + } AsyncSearchResponse asyncSearchResponse = response.get(); - assertNotNull(response.get().getSearchResponse()); - assertEquals(0, response.get().getSearchResponse().getTotalShards()); - assertEquals(0, response.get().getSearchResponse().getSuccessfulShards()); - assertEquals(0, response.get().getSearchResponse().getFailedShards()); - Exception failure = asyncSearchResponse.getFailure(); - assertThat(failure, instanceOf(ElasticsearchException.class)); - assertEquals("Async search: error while reducing partial results", failure.getMessage()); - assertEquals(1, failure.getSuppressed().length); - assertThat(failure.getSuppressed()[0], instanceOf(ElasticsearchException.class)); - assertEquals("error while executing search", failure.getSuppressed()[0].getMessage()); - assertThat(failure.getSuppressed()[0].getCause(), instanceOf(CircuitBreakingException.class)); - assertEquals("boom", failure.getSuppressed()[0].getCause().getMessage()); + try { + assertNotNull(response.get().getSearchResponse()); + assertEquals(0, response.get().getSearchResponse().getTotalShards()); + assertEquals(0, response.get().getSearchResponse().getSuccessfulShards()); + assertEquals(0, response.get().getSearchResponse().getFailedShards()); + Exception failure = asyncSearchResponse.getFailure(); + assertThat(failure, instanceOf(ElasticsearchException.class)); + assertEquals("Async search: error while reducing partial results", failure.getMessage()); + assertEquals(1, failure.getSuppressed().length); + assertThat(failure.getSuppressed()[0], instanceOf(ElasticsearchException.class)); + assertEquals("error while executing search", failure.getSuppressed()[0].getMessage()); + assertThat(failure.getSuppressed()[0].getCause(), instanceOf(CircuitBreakingException.class)); + assertEquals("boom", failure.getSuppressed()[0].getCause().getMessage()); + } finally { + asyncSearchResponse.decRef(); + } } 
public void testWaitForCompletion() throws InterruptedException { - AsyncSearchTask task = createAsyncSearchTask(); - int numShards = randomIntBetween(0, 10); - List shards = new ArrayList<>(); - for (int i = 0; i < numShards; i++) { - shards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - List skippedShards = new ArrayList<>(); - int numSkippedShards = randomIntBetween(0, 10); - for (int i = 0; i < numSkippedShards; i++) { - skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - int totalShards = numShards + numSkippedShards; - task.getSearchProgressActionListener() - .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); - for (int i = 0; i < numShards; i++) { + try (AsyncSearchTask task = createAsyncSearchTask()) { + int numShards = randomIntBetween(0, 10); + List shards = new ArrayList<>(); + for (int i = 0; i < numShards; i++) { + shards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + List skippedShards = new ArrayList<>(); + int numSkippedShards = randomIntBetween(0, 10); + for (int i = 0; i < numSkippedShards; i++) { + skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + int totalShards = numShards + numSkippedShards; + task.getSearchProgressActionListener() + .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + for (int i = 0; i < numShards; i++) { + task.getSearchProgressActionListener() + .onPartialReduce(shards.subList(i, i + 1), new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + assertCompletionListeners(task, totalShards, 1 + numSkippedShards, numSkippedShards, 0, true, false); + } task.getSearchProgressActionListener() - .onPartialReduce(shards.subList(i, i + 1), new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); - assertCompletionListeners(task, totalShards, 1 + numSkippedShards, numSkippedShards, 0, true, false); + .onFinalReduce(shards, new 
TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false); + ActionListener.respondAndRelease( + (AsyncSearchTask.Listener) task.getProgressListener(), + newSearchResponse(totalShards, totalShards, numSkippedShards) + ); + assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, false, false); } - task.getSearchProgressActionListener() - .onFinalReduce(shards, new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); - assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false); - ((AsyncSearchTask.Listener) task.getProgressListener()).onResponse(newSearchResponse(totalShards, totalShards, numSkippedShards)); - assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, false, false); } public void testWithFetchFailures() throws InterruptedException { - AsyncSearchTask task = createAsyncSearchTask(); - int numShards = randomIntBetween(2, 10); - List shards = new ArrayList<>(); - for (int i = 0; i < numShards; i++) { - shards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - List skippedShards = new ArrayList<>(); - int numSkippedShards = randomIntBetween(0, 10); - for (int i = 0; i < numSkippedShards; i++) { - skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - int totalShards = numShards + numSkippedShards; - task.getSearchProgressActionListener() - .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); - for (int i = 0; i < numShards; i++) { + try (AsyncSearchTask task = createAsyncSearchTask()) { + int numShards = randomIntBetween(2, 10); + List shards = new ArrayList<>(); + for (int i = 0; i < numShards; i++) { + shards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + List skippedShards = new ArrayList<>(); + int numSkippedShards = randomIntBetween(0, 10); + for (int i = 0; i < 
numSkippedShards; i++) { + skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + int totalShards = numShards + numSkippedShards; task.getSearchProgressActionListener() - .onPartialReduce(shards.subList(i, i + 1), new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); - assertCompletionListeners(task, totalShards, 1 + numSkippedShards, numSkippedShards, 0, true, false); - } - task.getSearchProgressActionListener() - .onFinalReduce(shards, new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); - int numFetchFailures = randomIntBetween(1, numShards - 1); - ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFetchFailures]; - for (int i = 0; i < numFetchFailures; i++) { - IOException failure = new IOException("boum"); - // fetch failures are currently ignored, they come back with onFailure or onResponse anyways - task.getSearchProgressActionListener().onFetchFailure(i, new SearchShardTarget("0", new ShardId("0", "0", 1), null), failure); - shardSearchFailures[i] = new ShardSearchFailure(failure); + .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + for (int i = 0; i < numShards; i++) { + task.getSearchProgressActionListener() + .onPartialReduce(shards.subList(i, i + 1), new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + assertCompletionListeners(task, totalShards, 1 + numSkippedShards, numSkippedShards, 0, true, false); + } + task.getSearchProgressActionListener() + .onFinalReduce(shards, new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + int numFetchFailures = randomIntBetween(1, numShards - 1); + ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFetchFailures]; + for (int i = 0; i < numFetchFailures; i++) { + IOException failure = new IOException("boum"); + // fetch failures are currently ignored, they come back with onFailure or onResponse anyways + 
task.getSearchProgressActionListener() + .onFetchFailure(i, new SearchShardTarget("0", new ShardId("0", "0", 1), null), failure); + shardSearchFailures[i] = new ShardSearchFailure(failure); + } + assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false); + ActionListener.respondAndRelease( + (AsyncSearchTask.Listener) task.getProgressListener(), + newSearchResponse(totalShards, totalShards - numFetchFailures, numSkippedShards, shardSearchFailures) + ); + assertCompletionListeners(task, totalShards, totalShards - numFetchFailures, numSkippedShards, numFetchFailures, false, false); } - assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false); - ((AsyncSearchTask.Listener) task.getProgressListener()).onResponse( - newSearchResponse(totalShards, totalShards - numFetchFailures, numSkippedShards, shardSearchFailures) - ); - assertCompletionListeners(task, totalShards, totalShards - numFetchFailures, numSkippedShards, numFetchFailures, false, false); } public void testFatalFailureDuringFetch() throws InterruptedException { - AsyncSearchTask task = createAsyncSearchTask(); - int numShards = randomIntBetween(0, 10); - List shards = new ArrayList<>(); - for (int i = 0; i < numShards; i++) { - shards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - List skippedShards = new ArrayList<>(); - int numSkippedShards = randomIntBetween(0, 10); - for (int i = 0; i < numSkippedShards; i++) { - skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - int totalShards = numShards + numSkippedShards; - task.getSearchProgressActionListener() - .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); - for (int i = 0; i < numShards; i++) { + try (AsyncSearchTask task = createAsyncSearchTask()) { + int numShards = randomIntBetween(0, 10); + List shards = new ArrayList<>(); + for (int i = 0; i < numShards; i++) { + shards.add(new SearchShard(null, new 
ShardId("0", "0", 1))); + } + List skippedShards = new ArrayList<>(); + int numSkippedShards = randomIntBetween(0, 10); + for (int i = 0; i < numSkippedShards; i++) { + skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + int totalShards = numShards + numSkippedShards; task.getSearchProgressActionListener() - .onPartialReduce(shards.subList(0, i + 1), new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); - assertCompletionListeners(task, totalShards, i + 1 + numSkippedShards, numSkippedShards, 0, true, false); - } - task.getSearchProgressActionListener() - .onFinalReduce(shards, new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); - for (int i = 0; i < numShards; i++) { - // fetch failures are currently ignored, they come back with onFailure or onResponse anyways + .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + for (int i = 0; i < numShards; i++) { + task.getSearchProgressActionListener() + .onPartialReduce(shards.subList(0, i + 1), new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + assertCompletionListeners(task, totalShards, i + 1 + numSkippedShards, numSkippedShards, 0, true, false); + } task.getSearchProgressActionListener() - .onFetchFailure(i, new SearchShardTarget("0", new ShardId("0", "0", 1), null), new IOException("boum")); + .onFinalReduce(shards, new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + for (int i = 0; i < numShards; i++) { + // fetch failures are currently ignored, they come back with onFailure or onResponse anyways + task.getSearchProgressActionListener() + .onFetchFailure(i, new SearchShardTarget("0", new ShardId("0", "0", 1), null), new IOException("boum")); + } + assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false); + ((AsyncSearchTask.Listener) task.getProgressListener()).onFailure(new IOException("boum")); + assertCompletionListeners(task, 
totalShards, totalShards, numSkippedShards, 0, true, true); } - assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false); - ((AsyncSearchTask.Listener) task.getProgressListener()).onFailure(new IOException("boum")); - assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, true); } public void testFatalFailureWithNoCause() throws InterruptedException { - AsyncSearchTask task = createAsyncSearchTask(); - AsyncSearchTask.Listener listener = task.getSearchProgressActionListener(); - int numShards = randomIntBetween(0, 10); - List shards = new ArrayList<>(); - for (int i = 0; i < numShards; i++) { - shards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - List skippedShards = new ArrayList<>(); - int numSkippedShards = randomIntBetween(0, 10); - for (int i = 0; i < numSkippedShards; i++) { - skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); - } - int totalShards = numShards + numSkippedShards; - task.getSearchProgressActionListener() - .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + try (AsyncSearchTask task = createAsyncSearchTask()) { + AsyncSearchTask.Listener listener = task.getSearchProgressActionListener(); + int numShards = randomIntBetween(0, 10); + List shards = new ArrayList<>(); + for (int i = 0; i < numShards; i++) { + shards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + List skippedShards = new ArrayList<>(); + int numSkippedShards = randomIntBetween(0, 10); + for (int i = 0; i < numSkippedShards; i++) { + skippedShards.add(new SearchShard(null, new ShardId("0", "0", 1))); + } + int totalShards = numShards + numSkippedShards; + task.getSearchProgressActionListener() + .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); - listener.onFailure(new SearchPhaseExecutionException("fetch", "boum", ShardSearchFailure.EMPTY_ARRAY)); - assertCompletionListeners(task, 
totalShards, 0, numSkippedShards, 0, true, true); + listener.onFailure(new SearchPhaseExecutionException("fetch", "boum", ShardSearchFailure.EMPTY_ARRAY)); + assertCompletionListeners(task, totalShards, 0, numSkippedShards, 0, true, true); + } } public void testAddCompletionListenerScheduleErrorWaitForInitListener() throws InterruptedException { throwOnSchedule = true; - AsyncSearchTask asyncSearchTask = createAsyncSearchTask(); - AtomicReference failure = new AtomicReference<>(); - CountDownLatch latch = new CountDownLatch(1); - // onListShards has not been executed, then addCompletionListener has to wait for the - // onListShards call and is executed as init listener - asyncSearchTask.addCompletionListener(new ActionListener<>() { - @Override - public void onResponse(AsyncSearchResponse asyncSearchResponse) { - throw new AssertionError("onResponse should not be called"); - } + AtomicReference failure; + try (AsyncSearchTask asyncSearchTask = createAsyncSearchTask()) { + failure = new AtomicReference<>(); + CountDownLatch latch = new CountDownLatch(1); + // onListShards has not been executed, then addCompletionListener has to wait for the + // onListShards call and is executed as init listener + asyncSearchTask.addCompletionListener(new ActionListener<>() { + @Override + public void onResponse(AsyncSearchResponse asyncSearchResponse) { + throw new AssertionError("onResponse should not be called"); + } - @Override - public void onFailure(Exception e) { - assertTrue(failure.compareAndSet(null, e)); - latch.countDown(); - } - }, TimeValue.timeValueMillis(500L)); - asyncSearchTask.getSearchProgressActionListener() - .onListShards(Collections.emptyList(), Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, createTimeProvider()); - assertTrue(latch.await(1000, TimeUnit.SECONDS)); + @Override + public void onFailure(Exception e) { + assertTrue(failure.compareAndSet(null, e)); + latch.countDown(); + } + }, TimeValue.timeValueMillis(500L)); + 
asyncSearchTask.getSearchProgressActionListener() + .onListShards(Collections.emptyList(), Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + assertTrue(latch.await(1000, TimeUnit.SECONDS)); + } assertThat(failure.get(), instanceOf(RuntimeException.class)); } public void testAddCompletionListenerScheduleErrorInitListenerExecutedImmediately() throws InterruptedException { throwOnSchedule = true; - AsyncSearchTask asyncSearchTask = createAsyncSearchTask(); - asyncSearchTask.getSearchProgressActionListener() - .onListShards(Collections.emptyList(), Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, createTimeProvider()); - CountDownLatch latch = new CountDownLatch(1); - AtomicReference failure = new AtomicReference<>(); - // onListShards has already been executed, then addCompletionListener is executed immediately - asyncSearchTask.addCompletionListener(new ActionListener<>() { - @Override - public void onResponse(AsyncSearchResponse asyncSearchResponse) { - throw new AssertionError("onResponse should not be called"); - } + AtomicReference failure; + try (AsyncSearchTask asyncSearchTask = createAsyncSearchTask()) { + asyncSearchTask.getSearchProgressActionListener() + .onListShards(Collections.emptyList(), Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + CountDownLatch latch = new CountDownLatch(1); + failure = new AtomicReference<>(); + // onListShards has already been executed, then addCompletionListener is executed immediately + asyncSearchTask.addCompletionListener(new ActionListener<>() { + @Override + public void onResponse(AsyncSearchResponse asyncSearchResponse) { + throw new AssertionError("onResponse should not be called"); + } - @Override - public void onFailure(Exception e) { - assertTrue(failure.compareAndSet(null, e)); - latch.countDown(); - } - }, TimeValue.timeValueMillis(500L)); - assertTrue(latch.await(1000, TimeUnit.SECONDS)); + @Override + public void 
onFailure(Exception e) { + assertTrue(failure.compareAndSet(null, e)); + latch.countDown(); + } + }, TimeValue.timeValueMillis(500L)); + assertTrue(latch.await(1000, TimeUnit.SECONDS)); + } assertThat(failure.get(), instanceOf(RuntimeException.class)); } diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java index e41b3ef87fcb3..2786d9772108a 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.search; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Strings; @@ -26,6 +27,7 @@ import static org.elasticsearch.xpack.core.async.GetAsyncResultRequestTests.randomSearchId; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104838") public class AsyncStatusResponseTests extends AbstractWireSerializingTestCase { @Override diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java index 57cab31e7aaaf..0130746ab1702 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java @@ -8,6 +8,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; @@ -26,6 +27,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; public class RestSubmitAsyncSearchActionTests extends RestActionTestCase { @@ -33,7 +35,7 @@ public class RestSubmitAsyncSearchActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestSubmitAsyncSearchAction(new UsageService().getSearchUsageHolder()); + action = new RestSubmitAsyncSearchAction(new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class)); controller().registerHandler(action); } diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java index 5f724509ec98a..2406fc6b4e92a 100644 --- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java +++ b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; +import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.TestShardRoutingRoleStrategies; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -337,7 +338,8 @@ public void testScaleWhileShrinking() throws Exception { assertBusy(() -> { refreshClusterInfo(); final ClusterInfo clusterInfo = getClusterInfo(); - final long freeBytes = clusterInfo.getNodeMostAvailableDiskUsages().get(dataNode2Id).getFreeBytes(); + DiskUsage usage = 
clusterInfo.getNodeMostAvailableDiskUsages().get(dataNode2Id); + final long freeBytes = usage.freeBytes(); assertThat(freeBytes, is(equalTo(enoughSpaceForColocation))); }); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java index e9d54826436c2..60220391a2165 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java @@ -130,6 +130,7 @@ public List> getSettings() { @Override public List getRestHandlers( final Settings settings, + NamedWriteableRegistry namedWriteableRegistry, final RestController controller, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java index 69c14ffc65e58..d3be1816924fb 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.autoscaling.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -23,7 +22,7 @@ public class DeleteAutoscalingPolicyAction extends ActionType { @@ -49,11 +48,6 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeString(name); } - @Override - public ActionRequestValidationException 
validate() { - return null; - } - @Override public boolean equals(final Object o) { if (this == o) return true; diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java index 7944fb6738b0f..4a356f74e03f8 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.autoscaling.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -33,7 +32,7 @@ public class GetAutoscalingCapacityAction extends ActionType { @@ -51,11 +50,6 @@ public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { return new CancellableTask(id, type, action, "", parentTaskId, headers); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingPolicyAction.java index d01fb600ccbfe..12f1363151bec 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingPolicyAction.java @@ -26,7 +26,7 @@ public class GetAutoscalingPolicyAction extends 
ActionType { diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java index fe0cda75503e0..0de558121fa50 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java @@ -40,7 +40,7 @@ public class PutAutoscalingPolicyAction extends ActionType public static final String NAME = "cluster:admin/autoscaling/put_autoscaling_policy"; private PutAutoscalingPolicyAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java index 672ddad9ea189..d88fa19b18f49 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java @@ -300,7 +300,7 @@ private boolean nodeHasAccurateCapacity(DiscoveryNode node) { DiskUsage mostAvailable = clusterInfo.getNodeMostAvailableDiskUsages().get(node.getId()); DiskUsage leastAvailable = clusterInfo.getNodeLeastAvailableDiskUsages().get(node.getId()); if (mostAvailable == null - || mostAvailable.getPath().equals(leastAvailable.getPath()) == false + || mostAvailable.path().equals(leastAvailable.path()) == false || totalStorage(clusterInfo.getNodeMostAvailableDiskUsages(), node) < 0) { return false; } @@ -340,7 +340,7 @@ private 
AutoscalingCapacity.AutoscalingResources resourcesFor(DiscoveryNode node private static long totalStorage(Map diskUsages, DiscoveryNode node) { DiskUsage diskUsage = diskUsages.get(node.getId()); - return diskUsage != null ? diskUsage.getTotalBytes() : -1; + return diskUsage != null ? diskUsage.totalBytes() : -1; } private boolean rolesFilter(DiscoveryNode discoveryNode) { diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 7eb3cca18efd0..ffa3a7308da90 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -681,9 +681,8 @@ long unmovableSize(String nodeId, Collection shards) { return 0; } - long threshold = diskThresholdSettings.getFreeBytesThresholdHighStage(ByteSizeValue.ofBytes(diskUsage.getTotalBytes())) - .getBytes(); - long missing = threshold - diskUsage.getFreeBytes(); + long threshold = diskThresholdSettings.getFreeBytesThresholdHighStage(ByteSizeValue.ofBytes(diskUsage.totalBytes())).getBytes(); + long missing = threshold - diskUsage.freeBytes(); return Math.max(missing, shards.stream().mapToLong(this::sizeOf).min().orElseThrow()); } @@ -980,7 +979,7 @@ public static class ReactiveReason implements AutoscalingDeciderResult.Reason { static final int MAX_AMOUNT_OF_SHARDS = 512; private static final TransportVersion SHARD_IDS_OUTPUT_VERSION = TransportVersions.V_8_4_0; - private static final TransportVersion UNASSIGNED_NODE_DECISIONS_OUTPUT_VERSION = TransportVersions.V_8_500_020; + private static final TransportVersion UNASSIGNED_NODE_DECISIONS_OUTPUT_VERSION = TransportVersions.V_8_9_X; private final String reason; private final long unassigned; diff 
--git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java index bbd8e7ddc5a53..4061d37832184 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityServiceTests.java @@ -248,7 +248,7 @@ public void testContext() { DiskUsage diskUsage = new DiskUsage(nodeId, null, randomAlphaOfLength(5), total, randomLongBetween(0, total)); leastUsages.put(nodeId, diskUsage); if (randomBoolean()) { - diskUsage = new DiskUsage(nodeId, null, diskUsage.getPath(), total, diskUsage.getFreeBytes()); + diskUsage = new DiskUsage(nodeId, null, diskUsage.path(), total, diskUsage.freeBytes()); } mostUsages.put(nodeId, diskUsage); sumTotal += total; @@ -305,9 +305,9 @@ public void testContext() { new DiskUsage( multiPathNodeId, null, - randomValueOtherThan(original.getPath(), () -> randomAlphaOfLength(5)), - original.getTotalBytes(), - original.getFreeBytes() + randomValueOtherThan(original.path(), () -> randomAlphaOfLength(5)), + original.totalBytes(), + original.freeBytes() ) ); diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ByteRange.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ByteRange.java index f58f61c987143..7395a3203b315 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ByteRange.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ByteRange.java @@ -23,7 +23,7 @@ private ByteRange(long start, long end) { this.start = start; this.end = end; assert start >= 0L : "Start must be >= 0 but saw [" + start + "]"; - assert end >= start : 
"End must be greater or equal to start but saw [" + start + "][" + start + "]"; + assert end >= start : "End must be greater or equal to start but saw [" + end + "][" + start + "]"; } public long start() { diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 5e8933f86ae7d..2c5997e479209 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.blobcache.BlobCacheMetrics; @@ -25,7 +26,6 @@ import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RelativeByteSizeValue; -import org.elasticsearch.common.util.concurrent.AbstractAsyncTask; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Assertions; @@ -49,7 +49,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; @@ -57,6 +56,7 @@ import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.function.IntConsumer; 
import java.util.function.Predicate; @@ -253,6 +253,25 @@ public void validate(ByteSizeValue value, Map, Object> settings, bool Setting.Property.NodeScope ); + // used in tests + void computeDecay() { + if (cache instanceof LFUCache lfuCache) { + lfuCache.computeDecay(); + } + } + + // used in tests + void maybeScheduleDecayAndNewEpoch() { + if (cache instanceof LFUCache lfuCache) { + lfuCache.maybeScheduleDecayAndNewEpoch(lfuCache.epoch.get()); + } + } + + // used in tests + long epoch() { + return ((LFUCache) cache).epoch.get(); + } + private interface Cache extends Releasable { CacheEntry get(K cacheKey, long fileLength, int region); @@ -302,6 +321,8 @@ private CacheEntry(T chunk) { private final BlobCacheMetrics blobCacheMetrics; + private final Runnable evictIncrementer; + public SharedBlobCacheService( NodeEnvironment environment, Settings settings, @@ -360,6 +381,7 @@ public SharedBlobCacheService( this.recoveryRangeSize = BlobCacheUtils.toIntBytes(SHARED_CACHE_RECOVERY_RANGE_SIZE_SETTING.get(settings).getBytes()); this.blobCacheMetrics = blobCacheMetrics; + this.evictIncrementer = blobCacheMetrics.getEvictedCountNonZeroFrequency()::increment; } public static long calculateCacheSize(Settings settings, long totalFsSize) { @@ -428,6 +450,10 @@ private int getRegionSize(long fileLength, int region) { return effectiveRegionSize; } + public int getRegionSize() { + return regionSize; + } + CacheFileRegion get(KeyType cacheKey, long fileLength, int region) { return cache.get(cacheKey, fileLength, region).chunk; } @@ -495,6 +521,61 @@ public boolean maybeFetchFullEntry(KeyType cacheKey, long length, RangeMissingHa return true; } + /** + * Fetch and write in cache a region of a blob if there are enough free pages in the cache to do so. + * + * This method returns as soon as the download tasks are instantiated, but the tasks themselves + * are run on the bulk executor. 
+ * + * If an exception is thrown from the writer then the cache entry being downloaded is freed + * and unlinked + * + * @param cacheKey the key to fetch data for + * @param region the region of the blob to fetch + * @param blobLength the length of the blob from which the region is fetched (used to compute the size of the ending region) + * @param writer a writer that handles writing of newly downloaded data to the shared cache + * @param listener a listener that is completed with {@code true} if the current thread triggered the fetching of the region, in which + * case the data is available in cache. The listener is completed with {@code false} in every other cases: if the + * region to write is already available in cache, if the region is pending fetching via another thread or if there is + * not enough free pages to fetch the region. + */ + public void maybeFetchRegion( + final KeyType cacheKey, + final int region, + final long blobLength, + final RangeMissingHandler writer, + final ActionListener listener + ) { + if (freeRegionCount() < 1 && maybeEvictLeastUsed() == false) { + // no free page available and no old enough unused region to be evicted + listener.onResponse(false); + return; + } + long regionLength = regionSize; + try { + if (region == getEndingRegion(blobLength)) { + regionLength = blobLength - getRegionStart(region); + } + ByteRange regionRange = ByteRange.of(0, regionLength); + if (regionRange.isEmpty()) { + listener.onResponse(false); + return; + } + final CacheFileRegion entry = get(cacheKey, blobLength, region); + entry.populate(regionRange, writer, bulkIOExecutor, listener); + } catch (Exception e) { + listener.onFailure(e); + } + } + + // used by tests + boolean maybeEvictLeastUsed() { + if (cache instanceof LFUCache lfuCache) { + return lfuCache.maybeEvictLeastUsed(); + } + return false; + } + private static void throwAlreadyClosed(String message) { throw new AlreadyClosedException(message); } @@ -595,10 +676,16 @@ public final boolean 
isEvicted() { } } + /** + * While this class has incRef and tryIncRef methods, incRefEnsureOpen and tryIncrefEnsureOpen should + * always be used, ensuring the right ordering between incRef/tryIncRef and ensureOpen + * (see {@link LFUCache#maybeEvictAndTakeForFrequency(Runnable, int)}) + */ class CacheFileRegion extends EvictableRefCounted { final RegionKey regionKey; final SparseFileTracker tracker; + // io can be null when not init'ed or after evict/take volatile SharedBytes.IO io = null; CacheFileRegion(RegionKey regionKey, int regionSize) { @@ -612,6 +699,27 @@ public long physicalStartOffset() { return ioRef == null ? -1L : (long) regionKey.region * regionSize; } + public boolean tryIncRefEnsureOpen() { + if (tryIncRef()) { + ensureOpenOrDecRef(); + return true; + } + + return false; + } + + public void incRefEnsureOpen() { + incRef(); + ensureOpenOrDecRef(); + } + + private void ensureOpenOrDecRef() { + if (isEvicted()) { + decRef(); + throwAlreadyEvicted(); + } + } + // tries to evict this chunk if noone is holding onto its resources anymore // visible for tests. 
boolean tryEvict() { @@ -625,6 +733,17 @@ boolean tryEvict() { return false; } + boolean tryEvictNoDecRef() { + assert Thread.holdsLock(SharedBlobCacheService.this) : "must hold lock when evicting"; + if (refCount() <= 1 && evict()) { + logger.trace("evicted and take {} with channel offset {}", regionKey, physicalStartOffset()); + evictCount.increment(); + return true; + } + + return false; + } + public boolean forceEvict() { assert Thread.holdsLock(SharedBlobCacheService.this) : "must hold lock when evicting"; if (evict()) { @@ -647,23 +766,70 @@ protected void closeInternal() { logger.trace("closed {} with channel offset {}", regionKey, physicalStartOffset()); } - private void ensureOpen() { - if (isEvicted()) { - throwAlreadyEvicted(); - } - } - private static void throwAlreadyEvicted() { throwAlreadyClosed("File chunk is evicted"); } + /** + * Optimistically try to read from the region + * @return true if successful, i.e., not evicted and data available, false if evicted + */ boolean tryRead(ByteBuffer buf, long offset) throws IOException { - int readBytes = io.read(buf, getRegionRelativePosition(offset)); - if (isEvicted()) { - buf.position(buf.position() - readBytes); + SharedBytes.IO ioRef = this.io; + if (ioRef != null) { + int readBytes = ioRef.read(buf, getRegionRelativePosition(offset)); + if (isEvicted()) { + buf.position(buf.position() - readBytes); + return false; + } + return true; + } else { + // taken by someone else return false; } - return true; + } + + /** + * Populates a range in cache if the range is not available nor pending to be available in cache. 
+ * + * @param rangeToWrite the range of bytes to populate + * @param writer a writer that handles writing of newly downloaded data to the shared cache + * @param executor the executor used to download and to write new dat + * @param listener a listener that is completed with {@code true} if the current thread triggered the download and write of the + * range, in which case the listener is completed once writing is done. The listener is completed with {@code false} + * if the range to write is already available in cache or if another thread will download and write the range, in + * which cases the listener is completed immediately. + */ + void populate( + final ByteRange rangeToWrite, + final RangeMissingHandler writer, + final Executor executor, + final ActionListener listener + ) { + Releasable resource = null; + try { + incRefEnsureOpen(); + resource = Releasables.releaseOnce(this::decRef); + final List gaps = tracker.waitForRange( + rangeToWrite, + rangeToWrite, + Assertions.ENABLED ? 
ActionListener.releaseAfter(ActionListener.running(() -> { + assert regionOwners.get(io) == this; + }), resource) : ActionListener.releasing(resource) + ); + final var hasGapsToFill = gaps.size() > 0; + try (RefCountingListener refs = new RefCountingListener(listener.map(unused -> hasGapsToFill))) { + if (hasGapsToFill) { + final var cacheFileRegion = CacheFileRegion.this; + for (SparseFileTracker.Gap gap : gaps) { + var fillGapRunnable = fillGapRunnable(cacheFileRegion, writer, gap); + executor.execute(ActionRunnable.run(refs.acquire(), fillGapRunnable::run)); + } + } + } + } catch (Exception e) { + releaseAndFail(listener, resource, e); + } } void populateAndRead( @@ -676,9 +842,8 @@ void populateAndRead( ) { Releasable resource = null; try { - incRef(); + incRefEnsureOpen(); resource = Releasables.releaseOnce(this::decRef); - ensureOpen(); final List gaps = tracker.waitForRange( rangeToWrite, rangeToRead, @@ -701,51 +866,49 @@ void populateAndRead( ); if (gaps.isEmpty() == false) { - fillGaps(executor, writer, gaps); + final var cacheFileRegion = CacheFileRegion.this; + for (SparseFileTracker.Gap gap : gaps) { + executor.execute(fillGapRunnable(cacheFileRegion, writer, gap)); + } } } catch (Exception e) { releaseAndFail(listener, resource, e); } } - private void fillGaps(Executor executor, RangeMissingHandler writer, List gaps) { - final var cacheFileRegion = CacheFileRegion.this; - for (SparseFileTracker.Gap gap : gaps) { - executor.execute(new AbstractRunnable() { - - @Override - protected void doRun() throws Exception { - ensureOpen(); - if (cacheFileRegion.tryIncRef() == false) { - throw new AlreadyClosedException("File chunk [" + cacheFileRegion.regionKey + "] has been released"); - } - try { - final int start = Math.toIntExact(gap.start()); - var ioRef = io; - assert regionOwners.get(ioRef) == cacheFileRegion; - writer.fillCacheRange( - ioRef, - start, - start, - Math.toIntExact(gap.end() - start), - progress -> gap.onProgress(start + progress) - ); - 
writeCount.increment(); - } finally { - cacheFileRegion.decRef(); - } - gap.onCompletion(); + private AbstractRunnable fillGapRunnable(CacheFileRegion cacheFileRegion, RangeMissingHandler writer, SparseFileTracker.Gap gap) { + return new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + if (cacheFileRegion.tryIncRefEnsureOpen() == false) { + throw new AlreadyClosedException("File chunk [" + cacheFileRegion.regionKey + "] has been released"); } - - @Override - public void onFailure(Exception e) { - gap.onFailure(e); + try { + final int start = Math.toIntExact(gap.start()); + var ioRef = io; + assert regionOwners.get(ioRef) == cacheFileRegion; + writer.fillCacheRange( + ioRef, + start, + start, + Math.toIntExact(gap.end() - start), + progress -> gap.onProgress(start + progress) + ); + writeCount.increment(); + } finally { + cacheFileRegion.decRef(); } - }); - } + gap.onCompletion(); + } + + @Override + public void onFailure(Exception e) { + gap.onFailure(e); + } + }; } - private static void releaseAndFail(ActionListener listener, Releasable decrementRef, Exception e) { + private static void releaseAndFail(ActionListener listener, Releasable decrementRef, Exception e) { try { Releasables.close(decrementRef); } catch (Exception ex) { @@ -945,6 +1108,8 @@ private RangeAvailableHandler readerWithOffset(RangeAvailableHandler reader, Cac } private boolean assertValidRegionAndLength(CacheFileRegion fileRegion, int channelPos, int len) { + assert fileRegion.io != null; + assert fileRegion.hasReferences(); assert regionOwners.get(fileRegion.io) == fileRegion; assert channelPos >= 0 && channelPos + len <= regionSize; return true; @@ -992,16 +1157,22 @@ class LFUCacheEntry extends CacheEntry { LFUCacheEntry prev; LFUCacheEntry next; int freq; - volatile long lastAccessed; + volatile long lastAccessedEpoch; LFUCacheEntry(CacheFileRegion chunk, long lastAccessed) { super(chunk); - this.lastAccessed = lastAccessed; + this.lastAccessedEpoch = 
lastAccessed; + // todo: consider whether freq=1 is still right for new entries. + // it could risk decaying to level 0 right after and thus potentially be evicted + // if the freq 1 LRU chain was short. + // seems ok for now, since if it were to get evicted soon, the decays done would ensure we have more level 1 + // entries eventually and thus such an entry would (after some decays) be able to survive in the cache. + this.freq = 1; } void touch() { - long now = threadPool.relativeTimeInMillis(); - if (now - lastAccessed >= minTimeDelta) { + long now = epoch.get(); + if (now > lastAccessedEpoch) { maybePromote(now, this); } } @@ -1010,21 +1181,20 @@ void touch() { private final ConcurrentHashMap, LFUCacheEntry> keyMapping = new ConcurrentHashMap<>(); private final LFUCacheEntry[] freqs; private final int maxFreq; - private final long minTimeDelta; - private final CacheDecayTask decayTask; + private final DecayAndNewEpochTask decayAndNewEpochTask; + + private final AtomicLong epoch = new AtomicLong(); @SuppressWarnings("unchecked") LFUCache(Settings settings) { this.maxFreq = SHARED_CACHE_MAX_FREQ_SETTING.get(settings); - this.minTimeDelta = SHARED_CACHE_MIN_TIME_DELTA_SETTING.get(settings).millis(); freqs = (LFUCacheEntry[]) Array.newInstance(LFUCacheEntry.class, maxFreq); - decayTask = new CacheDecayTask(threadPool, threadPool.generic(), SHARED_CACHE_DECAY_INTERVAL_SETTING.get(settings)); - decayTask.rescheduleIfNecessary(); + decayAndNewEpochTask = new DecayAndNewEpochTask(threadPool.generic()); } @Override public void close() { - decayTask.close(); + decayAndNewEpochTask.close(); } int getFreq(CacheFileRegion cacheFileRegion) { @@ -1034,7 +1204,7 @@ int getFreq(CacheFileRegion cacheFileRegion) { @Override public LFUCacheEntry get(KeyType cacheKey, long fileLength, int region) { final RegionKey regionKey = new RegionKey<>(cacheKey, region); - final long now = threadPool.relativeTimeInMillis(); + final long now = epoch.get(); // try to just get from the map on 
the fast-path to save instantiating the capturing lambda needed on the slow path // if we did not find an entry var entry = keyMapping.get(regionKey); @@ -1045,7 +1215,7 @@ public LFUCacheEntry get(KeyType cacheKey, long fileLength, int region) { // io is volatile, double locking is fine, as long as we assign it last. if (entry.chunk.io == null) { synchronized (entry.chunk) { - if (entry.chunk.io == null) { + if (entry.chunk.io == null && entry.chunk.isEvicted() == false) { return initChunk(entry); } } @@ -1053,7 +1223,7 @@ public LFUCacheEntry get(KeyType cacheKey, long fileLength, int region) { assert assertChunkActiveOrEvicted(entry); // existing item, check if we need to promote item - if (now - entry.lastAccessed >= minTimeDelta) { + if (now > entry.lastAccessedEpoch) { maybePromote(now, entry); } @@ -1097,7 +1267,7 @@ private LFUCacheEntry initChunk(LFUCacheEntry entry) { throwAlreadyClosed("no free region found (contender)"); } // new item - assert entry.freq == 0; + assert entry.freq == 1; assert entry.prev == null; assert entry.next == null; final SharedBytes.IO freeSlot = freeRegions.poll(); @@ -1106,16 +1276,15 @@ private LFUCacheEntry initChunk(LFUCacheEntry entry) { assignToSlot(entry, freeSlot); } else { // need to evict something - int frequency; + SharedBytes.IO io; synchronized (SharedBlobCacheService.this) { - frequency = maybeEvict(); + io = maybeEvictAndTake(evictIncrementer); } - if (frequency > 0) { - blobCacheMetrics.getEvictedCountNonZeroFrequency().increment(); + if (io == null) { + io = freeRegions.poll(); } - final SharedBytes.IO freeSlotRetry = freeRegions.poll(); - if (freeSlotRetry != null) { - assignToSlot(entry, freeSlotRetry); + if (io != null) { + assignToSlot(entry, io); } else { boolean removed = keyMapping.remove(regionKey, entry); assert removed; @@ -1202,16 +1371,19 @@ private boolean assertChunkActiveOrEvicted(LFUCacheEntry entry) { assert entry.prev != null || entry.chunk.isEvicted(); } - assert 
regionOwners.get(entry.chunk.io) == entry.chunk || entry.chunk.isEvicted(); + SharedBytes.IO io = entry.chunk.io; + assert io != null || entry.chunk.isEvicted(); + assert io == null || regionOwners.get(io) == entry.chunk || entry.chunk.isEvicted(); return true; } - private void maybePromote(long now, LFUCacheEntry entry) { + private void maybePromote(long epoch, LFUCacheEntry entry) { synchronized (SharedBlobCacheService.this) { - if (now - entry.lastAccessed >= minTimeDelta && entry.freq + 1 < maxFreq && entry.chunk.isEvicted() == false) { + if (epoch > entry.lastAccessedEpoch && entry.freq < maxFreq - 1 && entry.chunk.isEvicted() == false) { unlink(entry); - entry.freq++; - entry.lastAccessed = now; + // go 2 up per epoch, allowing us to decay 1 every epoch. + entry.freq = Math.min(entry.freq + 2, maxFreq - 1); + entry.lastAccessedEpoch = epoch; pushEntryToBack(entry); } } @@ -1243,62 +1415,208 @@ private void unlink(final LFUCacheEntry entry) { assert invariant(entry, false); } + private void appendLevel1ToLevel0() { + assert Thread.holdsLock(SharedBlobCacheService.this); + var front0 = freqs[0]; + var front1 = freqs[1]; + if (front0 == null) { + freqs[0] = front1; + freqs[1] = null; + decrementFreqList(front1); + assert front1 == null || invariant(front1, true); + } else if (front1 != null) { + var back0 = front0.prev; + var back1 = front1.prev; + assert invariant(front0, true); + assert invariant(front1, true); + assert invariant(back0, true); + assert invariant(back1, true); + + decrementFreqList(front1); + + front0.prev = back1; + back0.next = front1; + front1.prev = back0; + assert back1.next == null; + + freqs[1] = null; + + assert invariant(front0, true); + assert invariant(front1, true); + assert invariant(back0, true); + assert invariant(back1, true); + } + } + + private void decrementFreqList(LFUCacheEntry entry) { + while (entry != null) { + entry.freq--; + entry = entry.next; + } + } + /** * Cycles through the {@link LFUCacheEntry} from 0 to max 
frequency and - * tries to evict a chunk if no one is holding onto its resources anymore + * tries to evict a chunk if no one is holding onto its resources anymore. + * + * Also regularly polls for free regions and thus might steal one in case any become available. * - * @return the frequency of the evicted entry as integer or -1 if no entry was evicted from cache + * @return a now free IO region or null if none available. */ - private int maybeEvict() { + private SharedBytes.IO maybeEvictAndTake(Runnable evictedNotification) { assert Thread.holdsLock(SharedBlobCacheService.this); - for (int currentFreq = 0; currentFreq < maxFreq; currentFreq++) { - for (LFUCacheEntry entry = freqs[currentFreq]; entry != null; entry = entry.next) { + long currentEpoch = epoch.get(); // must be captured before attempting to evict a freq 0 + SharedBytes.IO freq0 = maybeEvictAndTakeForFrequency(evictedNotification, 0); + if (freqs[0] == null) { + // no frequency 0 entries, let us switch epoch and decay so we get some for next time. + maybeScheduleDecayAndNewEpoch(currentEpoch); + } + if (freq0 != null) { + return freq0; + } + for (int currentFreq = 1; currentFreq < maxFreq; currentFreq++) { + // recheck this per freq in case we raced an eviction with an incref'er. 
+ SharedBytes.IO freeRegion = freeRegions.poll(); + if (freeRegion != null) { + return freeRegion; + } + SharedBytes.IO taken = maybeEvictAndTakeForFrequency(evictedNotification, currentFreq); + if (taken != null) { + return taken; + } + } + // give up + return null; + } + + private SharedBytes.IO maybeEvictAndTakeForFrequency(Runnable evictedNotification, int currentFreq) { + for (LFUCacheEntry entry = freqs[currentFreq]; entry != null; entry = entry.next) { + boolean evicted = entry.chunk.tryEvictNoDecRef(); + if (evicted) { + try { + SharedBytes.IO ioRef = entry.chunk.io; + if (ioRef != null) { + try { + if (entry.chunk.refCount() == 1) { + // we own that one refcount (since we CAS'ed evicted to 1) + // grab io, rely on incref'ers also checking evicted field. + entry.chunk.io = null; + assert regionOwners.remove(ioRef) == entry.chunk; + return ioRef; + } + } finally { + unlink(entry); + keyMapping.remove(entry.chunk.regionKey, entry); + } + } + } finally { + entry.chunk.decRef(); + if (currentFreq > 0) { + evictedNotification.run(); + } + } + } + } + return null; + } + + /** + * Check if a new epoch is needed based on the input. The input epoch should be captured + * before the determination that a new epoch is needed is done. + * @param currentEpoch the epoch to check against if a new epoch is needed + */ + private void maybeScheduleDecayAndNewEpoch(long currentEpoch) { + decayAndNewEpochTask.spawnIfNotRunning(currentEpoch); + } + + /** + * This method tries to evict the least used {@link LFUCacheEntry}. Only entries with the lowest possible frequency are considered + * for eviction. + * + * @return true if an entry was evicted, false otherwise. 
+ */ + public boolean maybeEvictLeastUsed() { + synchronized (SharedBlobCacheService.this) { + for (LFUCacheEntry entry = freqs[0]; entry != null; entry = entry.next) { boolean evicted = entry.chunk.tryEvict(); if (evicted && entry.chunk.io != null) { unlink(entry); keyMapping.remove(entry.chunk.regionKey, entry); - return currentFreq; + return true; } } } - return -1; + return false; } private void computeDecay() { + long now = threadPool.rawRelativeTimeInMillis(); + long afterLock; + long end; synchronized (SharedBlobCacheService.this) { - long now = threadPool.relativeTimeInMillis(); - for (int i = 0; i < maxFreq; i++) { - for (LFUCacheEntry entry = freqs[i]; entry != null; entry = entry.next) { - if (entry.freq > 0 && now - entry.lastAccessed >= 2 * minTimeDelta) { - unlink(entry); - entry.freq--; - pushEntryToBack(entry); - } - } + afterLock = threadPool.rawRelativeTimeInMillis(); + appendLevel1ToLevel0(); + for (int i = 2; i < maxFreq; i++) { + assert freqs[i - 1] == null; + freqs[i - 1] = freqs[i]; + freqs[i] = null; + decrementFreqList(freqs[i - 1]); + assert freqs[i - 1] == null || invariant(freqs[i - 1], true); } } + end = threadPool.rawRelativeTimeInMillis(); + logger.debug("Decay took {} ms (acquire lock: {} ms)", end - now, afterLock - now); } - class CacheDecayTask extends AbstractAsyncTask { + class DecayAndNewEpochTask extends AbstractRunnable { + + private final Executor executor; + private final AtomicLong pendingEpoch = new AtomicLong(); + private volatile boolean isClosed; - CacheDecayTask(ThreadPool threadPool, Executor executor, TimeValue interval) { - super(logger, Objects.requireNonNull(threadPool), executor, Objects.requireNonNull(interval), true); + DecayAndNewEpochTask(Executor executor) { + this.executor = executor; } @Override - protected boolean mustReschedule() { - return true; + protected void doRun() throws Exception { + if (isClosed == false) { + computeDecay(); + } + } + + @Override + public void onFailure(Exception e) { + 
logger.error("failed to run cache decay task", e); + } + + @Override + public void onAfter() { + assert pendingEpoch.get() == epoch.get() + 1; + epoch.incrementAndGet(); } @Override - public void runInternal() { - computeDecay(); + public void onRejection(Exception e) { + assert false : e; + logger.error("unexpected rejection", e); + epoch.incrementAndGet(); } @Override public String toString() { return "shared_cache_decay_task"; } + + public void spawnIfNotRunning(long currentEpoch) { + if (isClosed == false && pendingEpoch.compareAndSet(currentEpoch, currentEpoch + 1)) { + executor.execute(this); + } + } + + public void close() { + this.isClosed = true; + } } } } diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java index d861ff193112d..66a6cf4dbd949 100644 --- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java +++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java @@ -9,6 +9,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.blobcache.BlobCacheMetrics; import org.elasticsearch.blobcache.common.ByteRange; @@ -32,8 +33,11 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.BrokenBarrierException; @@ -88,11 +92,11 @@ public void testBasicEviction() throws IOException { assertEquals(2, cacheService.freeRegionCount()); synchronized (cacheService) { - 
assertTrue(region1.tryEvict()); + assertTrue(tryEvict(region1)); } assertEquals(3, cacheService.freeRegionCount()); synchronized (cacheService) { - assertFalse(region1.tryEvict()); + assertFalse(tryEvict(region1)); } assertEquals(3, cacheService.freeRegionCount()); final var bytesReadFuture = new PlainActionFuture(); @@ -105,17 +109,17 @@ public void testBasicEviction() throws IOException { bytesReadFuture ); synchronized (cacheService) { - assertFalse(region0.tryEvict()); + assertFalse(tryEvict(region0)); } assertEquals(3, cacheService.freeRegionCount()); assertFalse(bytesReadFuture.isDone()); taskQueue.runAllRunnableTasks(); synchronized (cacheService) { - assertTrue(region0.tryEvict()); + assertTrue(tryEvict(region0)); } assertEquals(4, cacheService.freeRegionCount()); synchronized (cacheService) { - assertTrue(region2.tryEvict()); + assertTrue(tryEvict(region2)); } assertEquals(5, cacheService.freeRegionCount()); assertTrue(bytesReadFuture.isDone()); @@ -123,6 +127,18 @@ public void testBasicEviction() throws IOException { } } + private static boolean tryEvict(SharedBlobCacheService.CacheFileRegion region1) { + if (randomBoolean()) { + return region1.tryEvict(); + } else { + boolean result = region1.tryEvictNoDecRef(); + if (result) { + region1.decRef(); + } + return result; + } + } + public void testAutoEviction() throws IOException { Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") @@ -161,7 +177,7 @@ public void testAutoEviction() throws IOException { // explicitly evict region 1 synchronized (cacheService) { - assertTrue(region1.tryEvict()); + assertTrue(tryEvict(region1)); } assertEquals(1, cacheService.freeRegionCount()); } @@ -235,9 +251,10 @@ public void testForceEvictResponse() throws IOException { } public void testDecay() throws IOException { + // we have 8 regions Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") - .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), 
ByteSizeValue.ofBytes(size(500)).getStringRep()) + .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(400)).getStringRep()) .put(SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(100)).getStringRep()) .put("path.home", createTempDir()) .build(); @@ -252,45 +269,152 @@ public void testDecay() throws IOException { BlobCacheMetrics.NOOP ) ) { + assertEquals(4, cacheService.freeRegionCount()); + final var cacheKey1 = generateCacheKey(); final var cacheKey2 = generateCacheKey(); - assertEquals(5, cacheService.freeRegionCount()); + final var cacheKey3 = generateCacheKey(); + // add a region that we can evict when provoking first decay + cacheService.get("evictkey", size(250), 0); + assertEquals(3, cacheService.freeRegionCount()); final var region0 = cacheService.get(cacheKey1, size(250), 0); - assertEquals(4, cacheService.freeRegionCount()); + assertEquals(2, cacheService.freeRegionCount()); final var region1 = cacheService.get(cacheKey2, size(250), 1); - assertEquals(3, cacheService.freeRegionCount()); + assertEquals(1, cacheService.freeRegionCount()); + final var region2 = cacheService.get(cacheKey3, size(250), 1); + assertEquals(0, cacheService.freeRegionCount()); - assertEquals(0, cacheService.getFreq(region0)); - assertEquals(0, cacheService.getFreq(region1)); + assertEquals(1, cacheService.getFreq(region0)); + assertEquals(1, cacheService.getFreq(region1)); + assertEquals(1, cacheService.getFreq(region2)); + AtomicLong expectedEpoch = new AtomicLong(); + Runnable triggerDecay = () -> { + assertThat(taskQueue.hasRunnableTasks(), is(false)); + cacheService.get(expectedEpoch.toString(), size(250), 0); + assertThat(taskQueue.hasRunnableTasks(), is(true)); + taskQueue.runAllRunnableTasks(); + assertThat(cacheService.epoch(), equalTo(expectedEpoch.incrementAndGet())); + }; - taskQueue.advanceTime(); - taskQueue.runAllRunnableTasks(); + triggerDecay.run(); + + cacheService.get(cacheKey1, 
size(250), 0); + cacheService.get(cacheKey2, size(250), 1); + cacheService.get(cacheKey3, size(250), 1); + + triggerDecay.run(); final var region0Again = cacheService.get(cacheKey1, size(250), 0); assertSame(region0Again, region0); - assertEquals(1, cacheService.getFreq(region0)); - assertEquals(0, cacheService.getFreq(region1)); + assertEquals(3, cacheService.getFreq(region0)); + assertEquals(1, cacheService.getFreq(region1)); + assertEquals(1, cacheService.getFreq(region2)); + + triggerDecay.run(); - taskQueue.advanceTime(); - taskQueue.runAllRunnableTasks(); cacheService.get(cacheKey1, size(250), 0); - assertEquals(2, cacheService.getFreq(region0)); + assertEquals(4, cacheService.getFreq(region0)); cacheService.get(cacheKey1, size(250), 0); + assertEquals(4, cacheService.getFreq(region0)); + assertEquals(0, cacheService.getFreq(region1)); + assertEquals(0, cacheService.getFreq(region2)); + + // ensure no freq=0 entries + cacheService.get(cacheKey2, size(250), 1); + cacheService.get(cacheKey3, size(250), 1); + assertEquals(2, cacheService.getFreq(region1)); + assertEquals(2, cacheService.getFreq(region2)); + + triggerDecay.run(); + + assertEquals(3, cacheService.getFreq(region0)); + assertEquals(1, cacheService.getFreq(region1)); + assertEquals(1, cacheService.getFreq(region2)); + + triggerDecay.run(); assertEquals(2, cacheService.getFreq(region0)); + assertEquals(0, cacheService.getFreq(region1)); + assertEquals(0, cacheService.getFreq(region2)); - // advance 2 ticks (decay only starts after 2 ticks) - taskQueue.advanceTime(); - taskQueue.runAllRunnableTasks(); - taskQueue.advanceTime(); - taskQueue.runAllRunnableTasks(); + // ensure no freq=0 entries + cacheService.get(cacheKey2, size(250), 1); + cacheService.get(cacheKey3, size(250), 1); + assertEquals(2, cacheService.getFreq(region1)); + assertEquals(2, cacheService.getFreq(region2)); + + triggerDecay.run(); assertEquals(1, cacheService.getFreq(region0)); - assertEquals(0, cacheService.getFreq(region1)); + 
assertEquals(1, cacheService.getFreq(region1)); + assertEquals(1, cacheService.getFreq(region2)); - // advance another tick - taskQueue.advanceTime(); - taskQueue.runAllRunnableTasks(); + triggerDecay.run(); assertEquals(0, cacheService.getFreq(region0)); assertEquals(0, cacheService.getFreq(region1)); + assertEquals(0, cacheService.getFreq(region2)); + } + } + + /** + * Test when many objects need to decay, in particular useful to measure how long the decay task takes. + * For 1M objects (with no assertions) it took 26ms locally. + */ + public void testMassiveDecay() throws IOException { + int regions = 1024; // to measure decay time, increase to 1024*1024 and disable assertions. + Settings settings = Settings.builder() + .put(NODE_NAME_SETTING.getKey(), "node") + .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(regions)).getStringRep()) + .put(SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(1)).getStringRep()) + .put("path.home", createTempDir()) + .build(); + final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(); + try ( + NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + var cacheService = new SharedBlobCacheService<>( + environment, + settings, + taskQueue.getThreadPool(), + ThreadPool.Names.GENERIC, + BlobCacheMetrics.NOOP + ) + ) { + Runnable decay = () -> { + assertThat(taskQueue.hasRunnableTasks(), is(true)); + long before = System.currentTimeMillis(); + taskQueue.runAllRunnableTasks(); + long after = System.currentTimeMillis(); + logger.debug("took {} ms", (after - before)); + }; + long fileLength = size(regions + 100); + Object cacheKey = new Object(); + for (int i = 0; i < regions; ++i) { + cacheService.get(cacheKey, fileLength, i); + if (Integer.bitCount(i) == 1) { + logger.debug("did {} gets", i); + } + } + assertThat(taskQueue.hasRunnableTasks(), is(false)); + cacheService.get(cacheKey, 
fileLength, regions); + decay.run(); + int maxRounds = 5; + for (int round = 2; round <= maxRounds; ++round) { + for (int i = round; i < regions + round; ++i) { + cacheService.get(cacheKey, fileLength, i); + if (Integer.bitCount(i) == 1) { + logger.debug("did {} gets", i); + } + } + decay.run(); + } + + Map freqs = new HashMap<>(); + for (int i = maxRounds; i < regions + maxRounds; ++i) { + int freq = cacheService.getFreq(cacheService.get(cacheKey, fileLength, i)) - 2; + freqs.compute(freq, (k, v) -> v == null ? 1 : v + 1); + if (Integer.bitCount(i) == 1) { + logger.debug("did {} gets", i); + } + } + assertThat(freqs.get(4), equalTo(regions - maxRounds + 1)); } } @@ -300,12 +424,12 @@ public void testDecay() throws IOException { */ public void testGetMultiThreaded() throws IOException { int threads = between(2, 10); + int regionCount = between(1, 20); + // if we have enough regions, a get should always have a result (except for explicit evict interference) + final boolean allowAlreadyClosed = regionCount < threads; Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") - .put( - SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), - ByteSizeValue.ofBytes(size(between(1, 20) * 100L)).getStringRep() - ) + .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(regionCount * 100L)).getStringRep()) .put(SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(100)).getStringRep()) .put(SharedBlobCacheService.SHARED_CACHE_MIN_TIME_DELTA_SETTING.getKey(), randomFrom("0", "1ms", "10s")) .put("path.home", createTempDir()) @@ -335,11 +459,13 @@ public void testGetMultiThreaded() throws IOException { ready.await(); for (int i = 0; i < iterations; ++i) { try { - SharedBlobCacheService.CacheFileRegion cacheFileRegion = cacheService.get( - cacheKeys[i], - fileLength, - regions[i] - ); + SharedBlobCacheService.CacheFileRegion cacheFileRegion; + try { + cacheFileRegion = 
cacheService.get(cacheKeys[i], fileLength, regions[i]); + } catch (AlreadyClosedException e) { + assert allowAlreadyClosed || e.getMessage().equals("evicted during free region allocation") : e; + throw e; + } if (cacheFileRegion.tryIncRef()) { if (yield[i] == 0) { Thread.yield(); @@ -697,6 +823,289 @@ public void testCacheSizeChanges() throws IOException { } } + public void testMaybeEvictLeastUsed() throws Exception { + final int numRegions = 10; + final long regionSize = size(1L); + Settings settings = Settings.builder() + .put(NODE_NAME_SETTING.getKey(), "node") + .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(numRegions)).getStringRep()) + .put(SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(regionSize).getStringRep()) + .put("path.home", createTempDir()) + .build(); + + final AtomicLong relativeTimeInMillis = new AtomicLong(0L); + final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(); + try ( + NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + var cacheService = new SharedBlobCacheService<>( + environment, + settings, + taskQueue.getThreadPool(), + ThreadPool.Names.GENERIC, + "bulk", + BlobCacheMetrics.NOOP + ) + ) { + final Map.CacheFileRegion> cacheEntries = new HashMap<>(); + + assertThat("All regions are free", cacheService.freeRegionCount(), equalTo(numRegions)); + assertThat("Cache has no entries", cacheService.maybeEvictLeastUsed(), is(false)); + + // use all regions in cache + for (int i = 0; i < numRegions; i++) { + final var cacheKey = generateCacheKey(); + var entry = cacheService.get(cacheKey, regionSize, 0); + entry.populate( + ByteRange.of(0L, regionSize), + (channel, channelPos, relativePos, length, progressUpdater) -> progressUpdater.accept(length), + taskQueue.getThreadPool().generic(), + ActionListener.noop() + ); + assertThat(cacheService.getFreq(entry), equalTo(1)); + 
cacheEntries.put(cacheKey, entry); + } + + assertThat("All regions are used", cacheService.freeRegionCount(), equalTo(0)); + assertThat("Cache entries are not old enough to be evicted", cacheService.maybeEvictLeastUsed(), is(false)); + + taskQueue.runAllRunnableTasks(); + + assertThat("All regions are used", cacheService.freeRegionCount(), equalTo(0)); + assertThat("Cache entries are not old enough to be evicted", cacheService.maybeEvictLeastUsed(), is(false)); + + cacheService.maybeScheduleDecayAndNewEpoch(); + taskQueue.runAllRunnableTasks(); + + cacheEntries.keySet().forEach(key -> cacheService.get(key, regionSize, 0)); + cacheService.maybeScheduleDecayAndNewEpoch(); + taskQueue.runAllRunnableTasks(); + + // touch some random cache entries + var usedCacheKeys = Set.copyOf(randomSubsetOf(cacheEntries.keySet())); + usedCacheKeys.forEach(key -> cacheService.get(key, regionSize, 0)); + + cacheEntries.forEach( + (key, entry) -> assertThat(cacheService.getFreq(entry), usedCacheKeys.contains(key) ? equalTo(3) : equalTo(1)) + ); + + assertThat("All regions are used", cacheService.freeRegionCount(), equalTo(0)); + assertThat("Cache entries are not old enough to be evicted", cacheService.maybeEvictLeastUsed(), is(false)); + + cacheService.maybeScheduleDecayAndNewEpoch(); + taskQueue.runAllRunnableTasks(); + + assertThat("All regions are used", cacheService.freeRegionCount(), equalTo(0)); + cacheEntries.forEach( + (key, entry) -> assertThat(cacheService.getFreq(entry), usedCacheKeys.contains(key) ? 
equalTo(2) : equalTo(0)) + ); + + var zeroFrequencyCacheEntries = cacheEntries.size() - usedCacheKeys.size(); + for (int i = 0; i < zeroFrequencyCacheEntries; i++) { + assertThat(cacheService.freeRegionCount(), equalTo(i)); + assertThat("Cache entry is old enough to be evicted", cacheService.maybeEvictLeastUsed(), is(true)); + assertThat(cacheService.freeRegionCount(), equalTo(i + 1)); + } + + assertThat("No more cache entries old enough to be evicted", cacheService.maybeEvictLeastUsed(), is(false)); + assertThat(cacheService.freeRegionCount(), equalTo(zeroFrequencyCacheEntries)); + } + } + + public void testMaybeFetchRegion() throws Exception { + final long cacheSize = size(500L); + final long regionSize = size(100L); + Settings settings = Settings.builder() + .put(NODE_NAME_SETTING.getKey(), "node") + .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(cacheSize).getStringRep()) + .put(SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(regionSize).getStringRep()) + .put("path.home", createTempDir()) + .build(); + + AtomicInteger bulkTaskCount = new AtomicInteger(0); + ThreadPool threadPool = new TestThreadPool("test") { + @Override + public ExecutorService executor(String name) { + ExecutorService generic = super.executor(Names.GENERIC); + if (Objects.equals(name, "bulk")) { + return new StoppableExecutorServiceWrapper(generic) { + @Override + public void execute(Runnable command) { + super.execute(command); + bulkTaskCount.incrementAndGet(); + } + }; + } + return generic; + } + }; + try ( + NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + var cacheService = new SharedBlobCacheService<>( + environment, + settings, + threadPool, + ThreadPool.Names.GENERIC, + "bulk", + BlobCacheMetrics.NOOP + ) + ) { + { + // fetch a single region + final var cacheKey = generateCacheKey(); + assertEquals(5, cacheService.freeRegionCount()); + final long 
blobLength = size(250); // 3 regions + AtomicLong bytesRead = new AtomicLong(0L); + final PlainActionFuture future = new PlainActionFuture<>(); + cacheService.maybeFetchRegion(cacheKey, 0, blobLength, (channel, channelPos, relativePos, length, progressUpdater) -> { + bytesRead.addAndGet(length); + progressUpdater.accept(length); + }, future); + + var fetched = future.get(10, TimeUnit.SECONDS); + assertThat("Region has been fetched", fetched, is(true)); + assertEquals(regionSize, bytesRead.get()); + assertEquals(4, cacheService.freeRegionCount()); + assertEquals(1, bulkTaskCount.get()); + } + { + // fetch multiple regions to used all the cache + final int remainingFreeRegions = cacheService.freeRegionCount(); + assertEquals(4, cacheService.freeRegionCount()); + + final var cacheKey = generateCacheKey(); + final long blobLength = regionSize * remainingFreeRegions; + AtomicLong bytesRead = new AtomicLong(0L); + + final PlainActionFuture> future = new PlainActionFuture<>(); + final var listener = new GroupedActionListener<>(remainingFreeRegions, future); + for (int region = 0; region < remainingFreeRegions; region++) { + cacheService.maybeFetchRegion( + cacheKey, + region, + blobLength, + (channel, channelPos, relativePos, length, progressUpdater) -> { + bytesRead.addAndGet(length); + progressUpdater.accept(length); + }, + listener + ); + } + + var results = future.get(10, TimeUnit.SECONDS); + assertThat(results.stream().allMatch(result -> result), is(true)); + assertEquals(blobLength, bytesRead.get()); + assertEquals(0, cacheService.freeRegionCount()); + assertEquals(1 + remainingFreeRegions, bulkTaskCount.get()); + } + { + // cache fully used, no entry old enough to be evicted + assertEquals(0, cacheService.freeRegionCount()); + final var cacheKey = generateCacheKey(); + final PlainActionFuture future = new PlainActionFuture<>(); + cacheService.maybeFetchRegion( + cacheKey, + randomIntBetween(0, 10), + randomLongBetween(1L, regionSize), + (channel, channelPos, 
relativePos, length, progressUpdater) -> { + throw new AssertionError("should not be executed"); + }, + future + ); + assertThat("Listener is immediately completed", future.isDone(), is(true)); + assertThat("Region already exists in cache", future.get(), is(false)); + } + { + cacheService.computeDecay(); + + // fetch one more region should evict an old cache entry + final var cacheKey = generateCacheKey(); + assertEquals(0, cacheService.freeRegionCount()); + long blobLength = randomLongBetween(1L, regionSize); + AtomicLong bytesRead = new AtomicLong(0L); + final PlainActionFuture future = new PlainActionFuture<>(); + cacheService.maybeFetchRegion(cacheKey, 0, blobLength, (channel, channelPos, relativePos, length, progressUpdater) -> { + bytesRead.addAndGet(length); + progressUpdater.accept(length); + }, future); + + var fetched = future.get(10, TimeUnit.SECONDS); + assertThat("Region has been fetched", fetched, is(true)); + assertEquals(blobLength, bytesRead.get()); + assertEquals(0, cacheService.freeRegionCount()); + } + } + + threadPool.shutdown(); + } + + public void testPopulate() throws Exception { + final long regionSize = size(1L); + Settings settings = Settings.builder() + .put(NODE_NAME_SETTING.getKey(), "node") + .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(size(100)).getStringRep()) + .put(SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(), ByteSizeValue.ofBytes(regionSize).getStringRep()) + .put("path.home", createTempDir()) + .build(); + + final AtomicLong relativeTimeInMillis = new AtomicLong(0L); + final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(); + try ( + NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + var cacheService = new SharedBlobCacheService<>( + environment, + settings, + taskQueue.getThreadPool(), + ThreadPool.Names.GENERIC, + ThreadPool.Names.GENERIC, + BlobCacheMetrics.NOOP + ) + ) { + final var cacheKey = 
generateCacheKey(); + final var blobLength = size(12L); + + // start populating the first region + var entry = cacheService.get(cacheKey, blobLength, 0); + AtomicLong bytesWritten = new AtomicLong(0L); + final PlainActionFuture future1 = new PlainActionFuture<>(); + entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, relativePos, length, progressUpdater) -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + }, taskQueue.getThreadPool().generic(), future1); + + assertThat(future1.isDone(), is(false)); + assertThat(taskQueue.hasRunnableTasks(), is(true)); + + // start populating the second region + entry = cacheService.get(cacheKey, blobLength, 1); + final PlainActionFuture future2 = new PlainActionFuture<>(); + entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, relativePos, length, progressUpdater) -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + }, taskQueue.getThreadPool().generic(), future2); + + // start populating again the first region, listener should be called immediately + entry = cacheService.get(cacheKey, blobLength, 0); + final PlainActionFuture future3 = new PlainActionFuture<>(); + entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, relativePos, length, progressUpdater) -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + }, taskQueue.getThreadPool().generic(), future3); + + assertThat(future3.isDone(), is(true)); + var written = future3.get(10L, TimeUnit.SECONDS); + assertThat(written, is(false)); + + taskQueue.runAllRunnableTasks(); + + written = future1.get(10L, TimeUnit.SECONDS); + assertThat(future1.isDone(), is(true)); + assertThat(written, is(true)); + written = future2.get(10L, TimeUnit.SECONDS); + assertThat(future2.isDone(), is(true)); + assertThat(written, is(true)); + } + } + private void assertThatNonPositiveRecoveryRangeSizeRejected(Setting setting) { final String value = randomFrom(ByteSizeValue.MINUS_ONE, 
ByteSizeValue.ZERO).getStringRep(); final Settings settings = Settings.builder() diff --git a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml index df74a608dbe84..fb37e9a05c5cf 100644 --- a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml +++ b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_stats.yml @@ -54,6 +54,7 @@ ccr.follow_stats: index: bar - match: { indices.0.index: "bar" } + - match: { indices.0.total_global_checkpoint_lag: 0 } - match: { indices.0.shards.0.leader_index: "foo" } - match: { indices.0.shards.0.follower_index: "bar" } - match: { indices.0.shards.0.shard_id: 0 } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 7234b7babffdc..4a3a92aa80bc8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -137,7 +137,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E public static final String CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY = "remote_cluster_name"; public static final String REQUESTED_OPS_MISSING_METADATA_KEY = "es.requested_operations_missing"; - public static final TransportVersion TRANSPORT_VERSION_ACTION_WITH_SHARD_ID = TransportVersions.V_8_500_020; + public static final TransportVersion TRANSPORT_VERSION_ACTION_WITH_SHARD_ID = TransportVersions.V_8_9_X; private final boolean enabled; private final Settings settings; @@ -257,6 +257,7 @@ public List> getPersistentTasksExecutor( public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff 
--git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index 4a88aaf4a5389..e4af826ba5066 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -12,6 +12,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; @@ -23,6 +25,7 @@ import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.FilterClient; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -39,6 +42,7 @@ import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.license.RemoteClusterLicenseChecker; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.xpack.ccr.action.CcrRequests; import org.elasticsearch.xpack.ccr.action.ShardChangesAction; import org.elasticsearch.xpack.core.ClientHelper; @@ -155,7 +159,7 @@ public void checkRemoteClusterLicenseAndFetchLeaderIndexMetadataAndHistoryUUIDs( final DataStream remoteDataStream = indexAbstraction.getParentDataStream() != null ? 
indexAbstraction.getParentDataStream() : null; - hasPrivilegesToFollowIndices(remoteClient, new String[] { leaderIndex }, e -> { + hasPrivilegesToFollowIndices(client.threadPool().getThreadContext(), remoteClient, new String[] { leaderIndex }, e -> { if (e == null) { fetchLeaderHistoryUUIDs( remoteClient, @@ -193,7 +197,8 @@ public static void checkRemoteClusterLicenseAndFetchClusterState( final Consumer leaderClusterStateConsumer ) { try { - Client remoteClient = systemClient( + var remoteClient = systemClient( + client.threadPool().getThreadContext(), client.getRemoteClusterClient(clusterAlias, client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME)) ); checkRemoteClusterLicenseAndFetchClusterState( @@ -231,7 +236,7 @@ public static void checkRemoteClusterLicenseAndFetchClusterState( private static void checkRemoteClusterLicenseAndFetchClusterState( final Client client, final String clusterAlias, - final Client remoteClient, + final RemoteClusterClient remoteClient, final ClusterStateRequest request, final Consumer onFailure, final Consumer leaderClusterStateConsumer, @@ -251,7 +256,7 @@ public void onResponse(final RemoteClusterLicenseChecker.LicenseCheck licenseChe onFailure ); // following an index in remote cluster, so use remote client to fetch leader index metadata - remoteClient.admin().cluster().state(request, clusterStateListener); + remoteClient.execute(ClusterStateAction.REMOTE_TYPE, request, clusterStateListener); } else { onFailure.accept(nonCompliantLicense.apply(licenseCheck)); } @@ -277,7 +282,7 @@ public void onFailure(final Exception e) { // NOTE: Placed this method here; in order to avoid duplication of logic for fetching history UUIDs // in case of following a local or a remote cluster. 
public static void fetchLeaderHistoryUUIDs( - final Client remoteClient, + final RemoteClusterClient remoteClient, final IndexMetadata leaderIndexMetadata, final Consumer onFailure, final Consumer historyUUIDConsumer @@ -321,7 +326,7 @@ public static void fetchLeaderHistoryUUIDs( IndicesStatsRequest request = new IndicesStatsRequest(); request.clear(); request.indices(leaderIndex); - remoteClient.admin().indices().stats(request, ActionListener.wrap(indicesStatsHandler, onFailure)); + remoteClient.execute(IndicesStatsAction.REMOTE_TYPE, request, ActionListener.wrap(indicesStatsHandler, onFailure)); } /** @@ -333,7 +338,12 @@ public static void fetchLeaderHistoryUUIDs( * @param indices the indices * @param handler the callback */ - public void hasPrivilegesToFollowIndices(final Client remoteClient, final String[] indices, final Consumer handler) { + public void hasPrivilegesToFollowIndices( + final ThreadContext threadContext, + final RemoteClusterClient remoteClient, + final String[] indices, + final Consumer handler + ) { Objects.requireNonNull(remoteClient, "remoteClient"); Objects.requireNonNull(indices, "indices"); if (indices.length == 0) { @@ -345,7 +355,7 @@ public void hasPrivilegesToFollowIndices(final Client remoteClient, final String return; } - final User user = getUser(remoteClient); + final User user = getUser(threadContext); if (user == null) { handler.accept(new IllegalStateException("missing or unable to read authentication info on request")); return; @@ -382,13 +392,44 @@ public void hasPrivilegesToFollowIndices(final Client remoteClient, final String handler.accept(Exceptions.authorizationError(message.toString())); } }; - remoteClient.execute(HasPrivilegesAction.INSTANCE, request, ActionListener.wrap(responseHandler, handler)); + remoteClient.execute(HasPrivilegesAction.REMOTE_TYPE, request, ActionListener.wrap(responseHandler, handler)); + } + + User getUser(ThreadContext threadContext) { + return new SecurityContext(Settings.EMPTY, 
threadContext).getUser(); } - User getUser(final Client remoteClient) { - final ThreadContext threadContext = remoteClient.threadPool().getThreadContext(); - final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); - return securityContext.getUser(); + public static RemoteClusterClient wrapRemoteClusterClient( + ThreadContext threadContext, + RemoteClusterClient client, + Map headers, + ClusterState clusterState + ) { + if (headers.isEmpty()) { + return client; + } else { + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterState); + if (filteredHeaders.isEmpty()) { + return client; + } + return new RemoteClusterClient() { + @Override + public void execute( + RemoteClusterActionType action, + Request request, + ActionListener listener + ) { + ClientHelper.executeWithHeadersAsync( + threadContext, + filteredHeaders, + null, + request, + listener, + (r, l) -> client.execute(action, r, l) + ); + } + }; + } } public static Client wrapClient(Client client, Map headers, ClusterState clusterState) { @@ -412,19 +453,18 @@ protected void } } - private static Client systemClient(Client client) { - final ThreadContext threadContext = client.threadPool().getThreadContext(); - return new FilterClient(client) { + private static RemoteClusterClient systemClient(ThreadContext threadContext, RemoteClusterClient delegate) { + return new RemoteClusterClient() { @Override - protected void doExecute( - ActionType action, + public void execute( + RemoteClusterActionType action, Request request, ActionListener listener ) { final Supplier supplier = threadContext.newRestorableContext(false); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.markAsSystemContext(); - super.doExecute(action, request, new ContextPreservingActionListener<>(supplier, listener)); + delegate.execute(action, request, new ContextPreservingActionListener<>(supplier, listener)); } } }; diff --git 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java index a73a3dc5d715f..cdb5bf67b4712 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -75,7 +75,7 @@ public static Optional syncAddRetentionLea final ShardId leaderShardId, final String retentionLeaseId, final long retainingSequenceNumber, - final Client remoteClient, + final RemoteClusterClient remoteClient, final TimeValue timeout ) { try { @@ -103,7 +103,7 @@ public static void asyncAddRetentionLease( final ShardId leaderShardId, final String retentionLeaseId, final long retainingSequenceNumber, - final Client remoteClient, + final RemoteClusterClient remoteClient, final ActionListener listener ) { final RetentionLeaseActions.AddRequest request = new RetentionLeaseActions.AddRequest( @@ -112,7 +112,7 @@ public static void asyncAddRetentionLease( retainingSequenceNumber, "ccr" ); - remoteClient.execute(RetentionLeaseActions.ADD, request, listener); + remoteClient.execute(RetentionLeaseActions.REMOTE_ADD, request, listener); } /** @@ -130,7 +130,7 @@ public static Optional syncRenewRetentionLease( final ShardId leaderShardId, final String retentionLeaseId, final long retainingSequenceNumber, - final Client remoteClient, + final RemoteClusterClient remoteClient, final TimeValue timeout ) { try { @@ -158,7 +158,7 @@ public static void 
asyncRenewRetentionLease( final ShardId leaderShardId, final String retentionLeaseId, final long retainingSequenceNumber, - final Client remoteClient, + final RemoteClusterClient remoteClient, final ActionListener listener ) { final RetentionLeaseActions.RenewRequest request = new RetentionLeaseActions.RenewRequest( @@ -167,7 +167,7 @@ public static void asyncRenewRetentionLease( retainingSequenceNumber, "ccr" ); - remoteClient.execute(RetentionLeaseActions.RENEW, request, listener); + remoteClient.execute(RetentionLeaseActions.REMOTE_RENEW, request, listener); } /** @@ -183,11 +183,11 @@ public static void asyncRenewRetentionLease( public static void asyncRemoveRetentionLease( final ShardId leaderShardId, final String retentionLeaseId, - final Client remoteClient, + final RemoteClusterClient remoteClient, final ActionListener listener ) { final RetentionLeaseActions.RemoveRequest request = new RetentionLeaseActions.RemoveRequest(leaderShardId, retentionLeaseId); - remoteClient.execute(RetentionLeaseActions.REMOVE, request, listener); + remoteClient.execute(RetentionLeaseActions.REMOTE_REMOVE, request, listener); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java index 815a61297767a..77eea2a452bb7 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java @@ -9,10 +9,11 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.RequestValidators; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import 
org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; @@ -53,7 +54,7 @@ public static PutMappingRequest putMappingRequest(String followerIndex, MappingM * must be at least the provided {@code mappingVersion} and {@code metadataVersion} respectively. */ public static void getIndexMetadata( - Client client, + RemoteClusterClient client, Index index, long mappingVersion, long metadataVersion, @@ -64,7 +65,7 @@ public static void getIndexMetadata( if (metadataVersion > 0) { request.waitForMetadataVersion(metadataVersion).waitForTimeout(timeoutSupplier.get()); } - client.admin().cluster().state(request, listener.delegateFailureAndWrap((delegate, response) -> { + client.execute(ClusterStateAction.REMOTE_TYPE, request, listener.delegateFailureAndWrap((delegate, response) -> { if (response.getState() == null) { // timeout on wait_for_metadata_version assert metadataVersion > 0 : metadataVersion; if (timeoutSupplier.get().nanos() < 0) { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index 6b990d033cae5..ca6fb5683e540 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -62,9 +63,10 @@ public class ShardChangesAction extends ActionType public static final ShardChangesAction INSTANCE = new ShardChangesAction(); public static final String NAME = "indices:data/read/xpack/ccr/shard_changes"; + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>(NAME, Response::new); private ShardChangesAction() { - super(NAME, ShardChangesAction.Response::new); + super(NAME); } public static class Request extends SingleShardRequest implements RawIndexingDataTransportRequest { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index b73aab1dbfd55..05945ff3e79a8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -13,6 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -26,6 +27,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetadata; import 
org.elasticsearch.cluster.metadata.IndexMetadata; @@ -86,6 +88,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.ccr.CcrLicenseChecker.wrapClient; +import static org.elasticsearch.xpack.ccr.CcrLicenseChecker.wrapRemoteClusterClient; import static org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction.extractLeaderShardHistoryUUIDs; public final class ShardFollowTasksExecutor extends PersistentTasksExecutor { @@ -244,9 +247,11 @@ protected void innerUpdateSettings(final LongConsumer finalHandler, final Consum } }; try { - remoteClient(params).admin() - .cluster() - .state(CcrRequests.metadataRequest(leaderIndex.getName()), ActionListener.wrap(onResponse, errorHandler)); + remoteClient(params).execute( + ClusterStateAction.REMOTE_TYPE, + CcrRequests.metadataRequest(leaderIndex.getName()), + ActionListener.wrap(onResponse, errorHandler) + ); } catch (NoSuchRemoteClusterException e) { errorHandler.accept(e); } @@ -371,9 +376,11 @@ protected void innerUpdateAliases(final LongConsumer handler, final Consumer filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( threadPool.getThreadContext(), clusterService.state() @@ -108,7 +108,7 @@ protected void masterOperation( Consumer consumer = remoteClusterState -> { String[] indices = request.getLeaderIndexPatterns().toArray(new String[0]); - ccrLicenseChecker.hasPrivilegesToFollowIndices(remoteClient, indices, e -> { + ccrLicenseChecker.hasPrivilegesToFollowIndices(client.threadPool().getThreadContext(), remoteClient, indices, e -> { if (e == null) { submitUnbatchedTask( "put-auto-follow-pattern-" + request.getRemoteCluster(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java index 481f5f1817be5..dcbd8dfc70c4a 100644 --- 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -120,7 +121,7 @@ public void clusterStateProcessed(final ClusterState oldState, final ClusterStat ); final int numberOfShards = IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.get(indexMetadata.getSettings()); - final Client remoteClient; + final RemoteClusterClient remoteClient; try { remoteClient = client.getRemoteClusterClient(remoteClusterName, remoteClientResponseExecutor); } catch (Exception e) { @@ -178,7 +179,7 @@ private void removeRetentionLeaseForShard( final ShardId followerShardId, final ShardId leaderShardId, final String retentionLeaseId, - final Client remoteClient, + final RemoteClusterClient remoteClient, final ActionListener listener ) { logger.trace("{} removing retention lease [{}] while unfollowing leader index", followerShardId, retentionLeaseId); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java index eb0cb95da5dff..71e0b644d1a95 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java @@ -14,6 +14,6 @@ public class BulkShardOperationsAction extends ActionType 
REMOTE_TYPE = RemoteClusterActionType.emptyResponse(NAME); + public static final RemoteClusterActionType REMOTE_INTERNAL_TYPE = RemoteClusterActionType.emptyResponse( + INTERNAL_NAME + ); + private ClearCcrRestoreSessionAction() { this(INTERNAL_NAME); } private ClearCcrRestoreSessionAction(String name) { - super(name, in -> ActionResponse.Empty.INSTANCE); + super(name); } abstract static class TransportDeleteCcrRestoreSessionAction extends HandledTransportAction< diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java index 2b33e18d83bfb..f8e4cda1501b6 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -24,7 +23,7 @@ public class DeleteInternalCcrRepositoryAction extends ActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + GetCcrRestoreFileChunkResponse::new + ); + public static final RemoteClusterActionType REMOTE_INTERNAL_TYPE = new RemoteClusterActionType<>( + INTERNAL_NAME, + GetCcrRestoreFileChunkResponse::new + ); private GetCcrRestoreFileChunkAction() { this(INTERNAL_NAME); } private GetCcrRestoreFileChunkAction(String name) { - super(name, GetCcrRestoreFileChunkAction.GetCcrRestoreFileChunkResponse::new); + super(name); } abstract static class 
TransportGetCcrRestoreFileChunkAction extends HandledTransportAction< diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java index 1354c82a715d2..3641cda5fc7d9 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; @@ -37,13 +38,21 @@ public class PutCcrRestoreSessionAction extends ActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + PutCcrRestoreSessionResponse::new + ); + public static final RemoteClusterActionType REMOTE_INTERNAL_TYPE = new RemoteClusterActionType<>( + INTERNAL_NAME, + PutCcrRestoreSessionResponse::new + ); private PutCcrRestoreSessionAction() { - super(INTERNAL_NAME, PutCcrRestoreSessionResponse::new); + super(INTERNAL_NAME); } private PutCcrRestoreSessionAction(String name) { - super(name, PutCcrRestoreSessionResponse::new); + super(name); } abstract static class TransportPutCcrRestoreSessionAction extends TransportSingleShardAction< diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java index 68c3ff97e26fe..0de1715e17a14 100644 --- 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -24,7 +23,7 @@ public class PutInternalCcrRepositoryAction extends ActionType( threadPool.getThreadContext(), - l -> getRemoteClusterClient().admin() - .cluster() - .prepareState() - .clear() - .setMetadata(true) - .setNodes(true) - .setMasterNodeTimeout(TimeValue.MAX_VALUE) - .execute(l.map(ClusterStateResponse::getState)) + l -> getRemoteClusterClient().execute( + ClusterStateAction.REMOTE_TYPE, + new ClusterStateRequest().clear().metadata(true).nodes(true).masterNodeTimeout(TimeValue.MAX_VALUE), + l.map(ClusterStateResponse::getState) + ) ); } @@ -175,7 +178,7 @@ public RepositoryMetadata getMetadata() { return metadata; } - private Client getRemoteClusterClient() { + private RemoteClusterClient getRemoteClusterClient() { return client.getRemoteClusterClient(remoteClusterAlias, remoteClientResponseExecutor); } @@ -208,12 +211,13 @@ public void getSnapshotInfo(GetSnapshotInfoContext context) { @Override public Metadata getSnapshotGlobalMetadata(SnapshotId snapshotId) { assert SNAPSHOT_ID.equals(snapshotId) : "RemoteClusterRepository only supports " + SNAPSHOT_ID + " as the SnapshotId"; - Client remoteClient = getRemoteClusterClient(); + var remoteClient = getRemoteClusterClient(); // We set a single dummy index name to avoid fetching all the index data - ClusterStateResponse clusterState = remoteClient.admin() - .cluster() - 
.state(CcrRequests.metadataRequest("dummy_index_name")) - .actionGet(ccrSettings.getRecoveryActionTimeout()); + ClusterStateResponse clusterState = PlainActionFuture.get( + f -> remoteClient.execute(ClusterStateAction.REMOTE_TYPE, CcrRequests.metadataRequest("dummy_index_name"), f), + ccrSettings.getRecoveryActionTimeout().millis(), + TimeUnit.MILLISECONDS + ); return clusterState.getState().metadata(); } @@ -221,12 +225,13 @@ public Metadata getSnapshotGlobalMetadata(SnapshotId snapshotId) { public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, SnapshotId snapshotId, IndexId index) { assert SNAPSHOT_ID.equals(snapshotId) : "RemoteClusterRepository only supports " + SNAPSHOT_ID + " as the SnapshotId"; String leaderIndex = index.getName(); - Client remoteClient = getRemoteClusterClient(); + var remoteClient = getRemoteClusterClient(); - ClusterStateResponse clusterState = remoteClient.admin() - .cluster() - .state(CcrRequests.metadataRequest(leaderIndex)) - .actionGet(ccrSettings.getRecoveryActionTimeout()); + ClusterStateResponse clusterState = PlainActionFuture.get( + f -> remoteClient.execute(ClusterStateAction.REMOTE_TYPE, CcrRequests.metadataRequest(leaderIndex), f), + ccrSettings.getRecoveryActionTimeout().millis(), + TimeUnit.MILLISECONDS + ); // Validates whether the leader cluster has been configured properly: PlainActionFuture future = new PlainActionFuture<>(); @@ -371,7 +376,7 @@ public void restoreShard( final Index leaderIndex = new Index(leaderIndexName, leaderUUID); final ShardId leaderShardId = new ShardId(leaderIndex, shardId.getId()); - final Client remoteClient = getRemoteClusterClient(); + final var remoteClient = getRemoteClusterClient(); final String retentionLeaseId = retentionLeaseId(localClusterName, shardId.getIndex(), remoteClusterAlias, leaderIndex); @@ -450,7 +455,7 @@ void acquireRetentionLeaseOnLeader( final ShardId shardId, final String retentionLeaseId, final ShardId leaderShardId, - final Client remoteClient 
+ final RemoteClusterClient remoteClient ) { logger.trace(() -> format("%s requesting leader to add retention lease [%s]", shardId, retentionLeaseId)); final TimeValue timeout = ccrSettings.getRecoveryActionTimeout(); @@ -507,12 +512,15 @@ void acquireRetentionLeaseOnLeader( public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId index, ShardId shardId) { assert SNAPSHOT_ID.equals(snapshotId) : "RemoteClusterRepository only supports " + SNAPSHOT_ID + " as the SnapshotId"; final String leaderIndex = index.getName(); - final IndicesStatsResponse response = getRemoteClusterClient().admin() - .indices() - .prepareStats(leaderIndex) - .clear() - .setStore(true) - .get(ccrSettings.getRecoveryActionTimeout()); + final IndicesStatsResponse response = PlainActionFuture.get( + f -> getRemoteClusterClient().execute( + IndicesStatsAction.REMOTE_TYPE, + new IndicesStatsRequest().indices(leaderIndex).clear().store(true), + f + ), + ccrSettings.getRecoveryActionTimeout().millis(), + TimeUnit.MILLISECONDS + ); for (ShardStats shardStats : response.getIndex(leaderIndex).getShards()) { final ShardRouting shardRouting = shardStats.getShardRouting(); if (shardRouting.shardId().id() == shardId.getId() && shardRouting.primary() && shardRouting.active()) { @@ -542,7 +550,7 @@ public void cloneShardSnapshot( public void awaitIdle() {} private void updateMappings( - Client leaderClient, + RemoteClusterClient leaderClient, Index leaderIndex, long leaderMappingVersion, Client followerClient, @@ -565,7 +573,7 @@ private void updateMappings( void openSession( String repositoryName, - Client remoteClient, + RemoteClusterClient remoteClient, ShardId leaderShardId, ShardId indexShardId, RecoveryState recoveryState, @@ -590,7 +598,7 @@ void openSession( ) ); remoteClient.execute( - PutCcrRestoreSessionAction.INTERNAL_INSTANCE, + PutCcrRestoreSessionAction.REMOTE_INTERNAL_TYPE, new PutCcrRestoreSessionRequest(sessionUUID, leaderShardId), 
ListenerTimeouts.wrapWithTimeout( threadPool, @@ -604,7 +612,7 @@ void openSession( private static class RestoreSession extends FileRestoreContext { - private final Client remoteClient; + private final RemoteClusterClient remoteClient; private final String sessionUUID; private final DiscoveryNode node; private final Store.MetadataSnapshot sourceMetadata; @@ -617,7 +625,7 @@ private static class RestoreSession extends FileRestoreContext { RestoreSession( String repositoryName, - Client remoteClient, + RemoteClusterClient remoteClient, String sessionUUID, DiscoveryNode node, ShardId shardId, @@ -684,7 +692,7 @@ protected FileChunk nextChunkRequest(StoreFileMetadata md) { @Override protected void executeChunkRequest(FileChunk request, ActionListener listener) { remoteClient.execute( - GetCcrRestoreFileChunkAction.INTERNAL_INSTANCE, + GetCcrRestoreFileChunkAction.REMOTE_INTERNAL_TYPE, new GetCcrRestoreFileChunkRequest(node, sessionUUID, request.md.name(), request.bytesRequested, leaderShardId), ListenerTimeouts.wrapWithTimeout(threadPool, listener.map(getCcrRestoreFileChunkResponse -> { writeFileChunk(request.md, getCcrRestoreFileChunkResponse); @@ -748,7 +756,7 @@ public void close(ActionListener listener) { ClearCcrRestoreSessionAction.INTERNAL_NAME ); ClearCcrRestoreSessionRequest clearRequest = new ClearCcrRestoreSessionRequest(sessionUUID, node, leaderShardId); - remoteClient.execute(ClearCcrRestoreSessionAction.INTERNAL_INSTANCE, clearRequest, closeListener.map(empty -> null)); + remoteClient.execute(ClearCcrRestoreSessionAction.REMOTE_INTERNAL_TYPE, clearRequest, closeListener.map(empty -> null)); } private record FileChunk(StoreFileMetadata md, int bytesRequested, boolean lastChunk) implements MultiChunkTransfer.ChunkRequest {} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index ea4bc8c92047a..4ce64bc41d6a1 100644 --- 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -15,12 +15,12 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.client.internal.Client; @@ -461,8 +461,8 @@ protected final Index resolveFollowerIndex(String index) { return new Index(index, uuid); } - protected final RefreshResponse refresh(Client client, String... indices) { - RefreshResponse actionGet = client.admin().indices().prepareRefresh(indices).get(); + protected final BroadcastResponse refresh(Client client, String... 
indices) { + BroadcastResponse actionGet = client.admin().indices().prepareRefresh(indices).get(); assertNoFailures(actionGet); return actionGet; } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseCheckerTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseCheckerTests.java index 6a6afbb0e8571..5ab10c5799295 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseCheckerTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseCheckerTests.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.ccr; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.user.User; @@ -25,17 +27,22 @@ public void testNoAuthenticationInfo() { final CcrLicenseChecker checker = new CcrLicenseChecker(() -> isCcrAllowed, () -> true) { @Override - User getUser(final Client remoteClient) { + User getUser(final ThreadContext threadContext) { return null; } }; final AtomicBoolean invoked = new AtomicBoolean(); - checker.hasPrivilegesToFollowIndices(mock(Client.class), new String[] { randomAlphaOfLength(8) }, e -> { - invoked.set(true); - assertThat(e, instanceOf(IllegalStateException.class)); - assertThat(e, hasToString(containsString("missing or unable to read authentication info on request"))); - }); + checker.hasPrivilegesToFollowIndices( + new ThreadContext(Settings.EMPTY), + mock(RemoteClusterClient.class), + new String[] { randomAlphaOfLength(8) }, + e -> { + invoked.set(true); + assertThat(e, instanceOf(IllegalStateException.class)); + assertThat(e, hasToString(containsString("missing or unable to read authentication info on request"))); + } + ); assertTrue(invoked.get()); } diff --git 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index d1dad46c5515c..d08b4c0f503d8 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RedirectToLocalClusterRemoteClusterClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; @@ -96,7 +97,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { public void testAutoFollower() { Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); ClusterState remoteState = createRemoteClusterState("logs-20190101", true); @@ -166,7 +167,7 @@ void cleanFollowedRemoteIndices(ClusterState remoteClusterState, List pa public void testAutoFollower_dataStream() { Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); ClusterState remoteState = createRemoteClusterStateWithDataStream("logs-foobar"); @@ -236,7 +237,7 @@ void cleanFollowedRemoteIndices(ClusterState remoteClusterState, List pa public void testAutoFollowerClusterStateApiFailure() { Client client = mock(Client.class); - 
when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); AutoFollowPattern autoFollowPattern = createAutoFollowPattern("remote", "logs-*"); Map patterns = new HashMap<>(); @@ -285,7 +286,7 @@ void updateAutoFollowMetadata(Function updateFunctio public void testAutoFollowerUpdateClusterStateFailure() { Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); ClusterState remoteState = createRemoteClusterState("logs-20190101", true); AutoFollowPattern autoFollowPattern = createAutoFollowPattern("remote", "logs-*"); @@ -648,7 +649,7 @@ void cleanFollowedRemoteIndices(ClusterState remoteClusterState, List pa public void testAutoFollowerCreateAndFollowApiCallFailure() { Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); ClusterState remoteState = createRemoteClusterState("logs-20190101", true); AutoFollowPattern autoFollowPattern = createAutoFollowPattern("remote", "logs-*"); @@ -1673,7 +1674,7 @@ public void testUpdateAutoFollowersNoActivePatterns() { public void testWaitForMetadataVersion() { Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); AutoFollowPattern autoFollowPattern = createAutoFollowPattern("remote", "logs-*"); Map patterns = new HashMap<>(); @@ -1737,7 +1738,7 @@ void updateAutoFollowMetadata(Function updateFunctio public void testWaitForTimeOut() { Client client = 
mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); AutoFollowPattern autoFollowPattern = createAutoFollowPattern("remote", "logs-*"); Map patterns = new HashMap<>(); @@ -1789,7 +1790,7 @@ void updateAutoFollowMetadata(Function updateFunctio public void testAutoFollowerSoftDeletesDisabled() { Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); ClusterState remoteState = createRemoteClusterState("logs-20190101", false); @@ -1855,7 +1856,7 @@ void cleanFollowedRemoteIndices(ClusterState remoteClusterState, List pa public void testAutoFollowerFollowerIndexAlreadyExists() { Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); ClusterState remoteState = createRemoteClusterState("logs-20190101", true); @@ -2022,7 +2023,7 @@ void updateAutoFollowMetadata( public void testClosedIndicesAreNotAutoFollowed() { final Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); final String pattern = "pattern1"; final ClusterState localState = ClusterState.builder(new ClusterName("local")) @@ -2117,7 +2118,7 @@ void cleanFollowedRemoteIndices(ClusterState remoteClusterState, List pa public void testExcludedPatternIndicesAreNotAutoFollowed() { final Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + 
when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); final String pattern = "pattern1"; final ClusterState localState = ClusterState.builder(new ClusterName("local")) @@ -2418,7 +2419,7 @@ private Tuple, Set> execute ClusterState finalRemoteState ) { final Client client = mock(Client.class); - when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client); + when(client.getRemoteClusterClient(anyString(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); final String pattern = "pattern1"; final ClusterState localState = ClusterState.builder(new ClusterName("local")) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java index 362f757204aa1..7d5d5cb5a40a0 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java @@ -42,7 +42,7 @@ import java.util.concurrent.CountDownLatch; import static java.util.Collections.emptySet; -import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.common.lucene.Lucene.cleanLuceneIndex; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -80,14 +80,12 @@ public void testDoNotFillGaps() throws Exception { // promote the replica to primary: final ShardRouting replicaRouting = indexShard.routingEntry(); - final ShardRouting primaryRouting = newShardRouting( + final ShardRouting primaryRouting = shardRoutingBuilder( replicaRouting.shardId(), replicaRouting.currentNodeId(), - null, true, - ShardRoutingState.STARTED, 
- replicaRouting.allocationId() - ); + ShardRoutingState.STARTED + ).withAllocationId(replicaRouting.allocationId()).build(); indexShard.updateShardState( primaryRouting, indexShard.getOperationPrimaryTerm() + 1, diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java index 099f15eb59e46..98ceeb96db071 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -68,7 +69,7 @@ public void testWhenRetentionLeaseAlreadyExistsWeTryToRenewIt() { ); // simulate that the retention lease already exists on the leader, and verify that we attempt to renew it - final Client remoteClient = mock(Client.class); + final RemoteClusterClient remoteClient = mock(RemoteClusterClient.class); final ArgumentCaptor addRequestCaptor = ArgumentCaptor.forClass( RetentionLeaseActions.AddRequest.class ); @@ -77,7 +78,7 @@ public void testWhenRetentionLeaseAlreadyExistsWeTryToRenewIt() { final ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new RetentionLeaseAlreadyExistsException(retentionLeaseId)); return null; - }).when(remoteClient).execute(same(RetentionLeaseActions.ADD), addRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.REMOTE_ADD), 
addRequestCaptor.capture(), any()); final ArgumentCaptor renewRequestCaptor = ArgumentCaptor.forClass( RetentionLeaseActions.RenewRequest.class ); @@ -86,17 +87,17 @@ public void testWhenRetentionLeaseAlreadyExistsWeTryToRenewIt() { final ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onResponse(ActionResponse.Empty.INSTANCE); return null; - }).when(remoteClient).execute(same(RetentionLeaseActions.RENEW), renewRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.REMOTE_RENEW), renewRequestCaptor.capture(), any()); repository.acquireRetentionLeaseOnLeader(followerShardId, retentionLeaseId, leaderShardId, remoteClient); - verify(remoteClient).execute(same(RetentionLeaseActions.ADD), any(RetentionLeaseActions.AddRequest.class), any()); + verify(remoteClient).execute(same(RetentionLeaseActions.REMOTE_ADD), any(RetentionLeaseActions.AddRequest.class), any()); assertThat(addRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(addRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(addRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); assertThat(addRequestCaptor.getValue().getSource(), equalTo("ccr")); - verify(remoteClient).execute(same(RetentionLeaseActions.RENEW), any(RetentionLeaseActions.RenewRequest.class), any()); + verify(remoteClient).execute(same(RetentionLeaseActions.REMOTE_RENEW), any(RetentionLeaseActions.RenewRequest.class), any()); assertThat(renewRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(renewRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(renewRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); @@ -139,7 +140,7 @@ public void testWhenRetentionLeaseExpiresBeforeWeCanRenewIt() { ); // simulate that the retention lease already exists on the leader, expires before we renew, and verify that we attempt to add it - final Client 
remoteClient = mock(Client.class); + final RemoteClusterClient remoteClient = mock(RemoteClusterClient.class); final ArgumentCaptor addRequestCaptor = ArgumentCaptor.forClass( RetentionLeaseActions.AddRequest.class ); @@ -162,7 +163,7 @@ public Void answer(final InvocationOnMock invocationOnMock) { return null; } - }).when(remoteClient).execute(same(RetentionLeaseActions.ADD), addRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.REMOTE_ADD), addRequestCaptor.capture(), any()); final ArgumentCaptor renewRequestCaptor = ArgumentCaptor.forClass( RetentionLeaseActions.RenewRequest.class ); @@ -171,17 +172,17 @@ public Void answer(final InvocationOnMock invocationOnMock) { final ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new RetentionLeaseNotFoundException(retentionLeaseId)); return null; - }).when(remoteClient).execute(same(RetentionLeaseActions.RENEW), renewRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.REMOTE_RENEW), renewRequestCaptor.capture(), any()); repository.acquireRetentionLeaseOnLeader(followerShardId, retentionLeaseId, leaderShardId, remoteClient); - verify(remoteClient, times(2)).execute(same(RetentionLeaseActions.ADD), any(RetentionLeaseActions.AddRequest.class), any()); + verify(remoteClient, times(2)).execute(same(RetentionLeaseActions.REMOTE_ADD), any(RetentionLeaseActions.AddRequest.class), any()); assertThat(addRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(addRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(addRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); assertThat(addRequestCaptor.getValue().getSource(), equalTo("ccr")); - verify(remoteClient).execute(same(RetentionLeaseActions.RENEW), any(RetentionLeaseActions.RenewRequest.class), any()); + verify(remoteClient).execute(same(RetentionLeaseActions.REMOTE_RENEW), 
any(RetentionLeaseActions.RenewRequest.class), any()); assertThat(renewRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(renewRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(renewRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 7e7e04510e2a9..71dd5bed6cf11 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -44,6 +44,7 @@ dependencies { api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" + testImplementation project(path: ':modules:aggregations') testImplementation project(path: ':modules:data-streams') // security deps @@ -61,7 +62,6 @@ dependencies { testImplementation project(path: ':modules:analysis-common') testImplementation project(path: ':modules:rest-root') testImplementation project(path: ':modules:health-shards-availability') - testImplementation project(":client:rest-high-level") // Needed for Fips140ProviderVerificationTests testCompileOnly('org.bouncycastle:bc-fips:1.0.2.4') diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java index 3e5be8b4ae2ff..7261ee1f66036 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.cluster.routing.allocation; -import org.elasticsearch.Version; import 
org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeType; @@ -506,7 +505,7 @@ private DesiredNode desiredNode(String externalId, DiscoveryNodeRole... roles) { .put(NODE_EXTERNAL_ID_SETTING.getKey(), externalId) .put(NODE_NAME_SETTING.getKey(), externalId) .build(); - return new DesiredNode(settings, 1, ByteSizeValue.ONE, ByteSizeValue.ONE, Version.CURRENT); + return new DesiredNode(settings, 1, ByteSizeValue.ONE, ByteSizeValue.ONE); } private void updateDesiredNodes(DesiredNode... desiredNodes) { diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierTelemetryPlugin.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierTelemetryPlugin.java index 8639c9cc267fe..b67d4e417f97c 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierTelemetryPlugin.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierTelemetryPlugin.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.cluster.routing.allocation; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -24,7 +25,9 @@ import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import 
org.elasticsearch.xpack.core.action.XPackUsageResponse; import java.nio.file.Path; @@ -52,7 +55,7 @@ public DataTiersTransportXPackUsageAction( } @Override - protected List usageActions() { + protected List> usageActions() { return Collections.singletonList(XPackUsageFeatureAction.DATA_TIERS); } } @@ -69,7 +72,7 @@ public DataTiersTransportXPackInfoAction( } @Override - protected List infoActions() { + protected List> infoActions() { return Collections.singletonList(XPackInfoFeatureAction.DATA_TIERS); } } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java index faeb760b3c181..973b94f05c114 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.rest.action; import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; @@ -33,6 +34,7 @@ import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackUsageAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; @@ -129,7 +131,7 @@ public DataTiersOnlyTransportXPackUsageAction( } @Override - protected List usageActions() { + protected List> usageActions() { 
return List.of(XPackUsageFeatureAction.DATA_TIERS); } } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java index da30c54a7bb83..1e36dc5c603ab 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.TransportAction; @@ -37,7 +38,6 @@ import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; import org.elasticsearch.xpack.core.action.XPackUsageResponse; @@ -53,6 +53,7 @@ import static org.elasticsearch.action.support.ActionTestUtils.wrapAsRestResponseListener; import static org.elasticsearch.test.TaskAssertions.assertAllCancellableTasksAreCancelled; import static org.elasticsearch.test.TaskAssertions.assertAllTasksHaveFinished; +import static org.elasticsearch.xpack.core.action.XPackUsageFeatureAction.xpackUsageFeatureAction; import static org.hamcrest.core.IsEqual.equalTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, 
numClientNodes = 0) @@ -100,8 +101,8 @@ public void testCancellation() throws Exception { } public static class BlockingUsageActionXPackPlugin extends LocalStateCompositeXPackPlugin { - public static final XPackUsageFeatureAction BLOCKING_XPACK_USAGE = new XPackUsageFeatureAction("blocking_xpack_usage"); - public static final XPackUsageFeatureAction NON_BLOCKING_XPACK_USAGE = new XPackUsageFeatureAction("regular_xpack_usage"); + public static final ActionType BLOCKING_XPACK_USAGE = xpackUsageFeatureAction("blocking_xpack_usage"); + public static final ActionType NON_BLOCKING_XPACK_USAGE = xpackUsageFeatureAction("regular_xpack_usage"); public BlockingUsageActionXPackPlugin(Settings settings, Path configPath) { super(settings, configPath); @@ -135,7 +136,7 @@ public ClusterBlockAwareTransportXPackUsageAction( } @Override - protected List usageActions() { + protected List> usageActions() { return List.of(BlockingUsageActionXPackPlugin.BLOCKING_XPACK_USAGE, BlockingUsageActionXPackPlugin.NON_BLOCKING_XPACK_USAGE); } } diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index f747d07224454..daa3a4db913ef 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -81,7 +81,6 @@ exports org.elasticsearch.xpack.core.ml.annotations; exports org.elasticsearch.xpack.core.ml.autoscaling; exports org.elasticsearch.xpack.core.ml.calendars; - exports org.elasticsearch.xpack.core.ml.datafeed.extractor; exports org.elasticsearch.xpack.core.ml.datafeed; exports org.elasticsearch.xpack.core.ml.dataframe.analyses; exports org.elasticsearch.xpack.core.ml.dataframe.evaluation.classification; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java index b5e34f20002d7..3b581ec123a71 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java @@ -6,17 +6,17 @@ */ package org.elasticsearch.license; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; public class DeleteLicenseRequestBuilder extends AcknowledgedRequestBuilder< - DeleteLicenseRequest, + AcknowledgedRequest.Plain, AcknowledgedResponse, DeleteLicenseRequestBuilder> { public DeleteLicenseRequestBuilder(ElasticsearchClient client) { - super(client, TransportDeleteLicenseAction.TYPE, new DeleteLicenseRequest()); + super(client, TransportDeleteLicenseAction.TYPE, new AcknowledgedRequest.Plain()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java index 6fb97f7c5e1e6..a53044f5209ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java @@ -14,6 +14,6 @@ public class GetBasicStatusAction extends ActionType { public static final String NAME = "cluster:admin/xpack/license/basic_status"; private GetBasicStatusAction() { - super(NAME, GetBasicStatusResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java index c987374b5bbf8..4125856990a44 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java @@ -14,6 +14,6 @@ public class GetLicenseAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/license/get"; private GetLicenseAction() { - super(NAME, GetLicenseResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java index 8649fe2506ace..aa5d03b2648e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java @@ -14,6 +14,6 @@ public class GetTrialStatusAction extends ActionType { public static final String NAME = "cluster:admin/xpack/license/trial_status"; private GetTrialStatusAction() { - super(NAME, GetTrialStatusResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java index c7be971ba4bfd..4532a00d95717 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java @@ -14,6 +14,6 @@ public class PostStartBasicAction extends ActionType { public static final String NAME = "cluster:admin/xpack/license/start_basic"; private PostStartBasicAction() { - super(NAME, PostStartBasicResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java index 4f9845a00bc0d..602e521fe10e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.license; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -24,11 +23,6 @@ public PostStartBasicRequest(StreamInput in) throws IOException { acknowledge = in.readBoolean(); } - @Override - public ActionRequestValidationException validate() { - return null; - } - public PostStartBasicRequest acknowledge(boolean acknowledge) { this.acknowledge = acknowledge; return this; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java index 8e3057b3af1c1..469e69c5798d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java @@ -14,6 +14,6 @@ public class PostStartTrialAction extends ActionType { public static final String NAME = "cluster:admin/xpack/license/start_trial"; private PostStartTrialAction() { - super(NAME, PostStartTrialResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java index f98fa7c566c62..e6da3bc5a3eb1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java @@ -15,6 +15,6 @@ public class PutLicenseAction extends ActionType { public static final String NAME = "cluster:admin/xpack/license/put"; private PutLicenseAction() { - super(NAME, PutLicenseResponse::new); + super(NAME); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java index dc69cd0294f2d..18125592a1f51 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java @@ -221,7 +221,7 @@ private void remoteClusterLicense(final String clusterAlias, final ActionListene request.setCategories(EnumSet.of(XPackInfoRequest.Category.LICENSE)); try { client.getRemoteClusterClient(clusterAlias, remoteClientResponseExecutor) - .execute(XPackInfoAction.INSTANCE, request, contextPreservingActionListener); + .execute(XPackInfoAction.REMOTE_TYPE, request, contextPreservingActionListener); } catch (final Exception e) { contextPreservingActionListener.onFailure(e); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java index 1a2ed66f6d5f5..cc7dace36e08e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java @@ -7,9 +7,9 @@ package org.elasticsearch.license; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -35,7 +35,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - DeleteLicenseRequest deleteLicenseRequest = 
new DeleteLicenseRequest(); + AcknowledgedRequest.Plain deleteLicenseRequest = new AcknowledgedRequest.Plain(); deleteLicenseRequest.timeout(request.paramAsTime("timeout", deleteLicenseRequest.timeout())); deleteLicenseRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteLicenseRequest.masterNodeTimeout())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java index 9d11cf2b59fec..7ac59e1dc327a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -19,14 +20,13 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.license.internal.MutableLicenseService; -import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class TransportDeleteLicenseAction extends AcknowledgedTransportMasterNodeAction { +public class TransportDeleteLicenseAction extends AcknowledgedTransportMasterNodeAction { - public static final ActionType TYPE = ActionType.localOnly("cluster:admin/xpack/license/delete"); + public static final ActionType TYPE = new ActionType<>("cluster:admin/xpack/license/delete"); private final 
MutableLicenseService licenseService; @Inject @@ -44,7 +44,7 @@ public TransportDeleteLicenseAction( clusterService, threadPool, actionFilters, - DeleteLicenseRequest::new, + AcknowledgedRequest.Plain::new, indexNameExpressionResolver, threadPool.executor(ThreadPool.Names.MANAGEMENT) ); @@ -52,14 +52,14 @@ public TransportDeleteLicenseAction( } @Override - protected ClusterBlockException checkBlock(DeleteLicenseRequest request, ClusterState state) { + protected ClusterBlockException checkBlock(AcknowledgedRequest.Plain request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } @Override protected void masterOperation( Task task, - final DeleteLicenseRequest request, + final AcknowledgedRequest.Plain request, ClusterState state, final ActionListener listener ) throws ElasticsearchException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java index 6be7e73058363..00023f9c30e70 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java @@ -25,10 +25,7 @@ public class TransportGetFeatureUsageAction extends HandledTransportAction { - public static final ActionType TYPE = new ActionType<>( - "cluster:admin/xpack/license/feature_usage", - GetFeatureUsageResponse::new - ); + public static final ActionType TYPE = new ActionType<>("cluster:admin/xpack/license/feature_usage"); private final XPackLicenseState licenseState; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java deleted file mode 100644 index b4f0642d37075..0000000000000 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.protocol.xpack.license; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; - -public class DeleteLicenseRequest extends AcknowledgedRequest { - - public DeleteLicenseRequest() {} - - public DeleteLicenseRequest(StreamInput in) throws IOException { - super(in); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersAction.java index 5d973c48ff7bf..fc9af342e31d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersAction.java @@ -15,7 +15,7 @@ public class MigrateToDataTiersAction extends ActionType void executeAsyncWithOrigin( + public static void executeAsyncWithOrigin( ThreadContext threadContext, String origin, Request request, @@ -227,21 +226,14 @@ public static v * Executes an asynchronous action using the provided client. 
The origin is set in the context and the listener * is wrapped to ensure the proper context is restored */ - public static < - Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder> void executeAsyncWithOrigin( - Client client, - String origin, - ActionType action, - Request request, - ActionListener listener - ) { - final ThreadContext threadContext = client.threadPool().getThreadContext(); - final Supplier supplier = threadContext.newRestorableContext(false); - try (ThreadContext.StoredContext ignore = threadContext.stashWithOrigin(origin)) { - client.execute(action, request, new ContextPreservingActionListener<>(supplier, listener)); - } + public static void executeAsyncWithOrigin( + Client client, + String origin, + ActionType action, + Request request, + ActionListener listener + ) { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), origin, request, listener, (r, l) -> client.execute(action, r, l)); } /** @@ -303,18 +295,35 @@ public static v ActionType action, Request request, ActionListener listener + ) { + executeWithHeadersAsync( + client.threadPool().getThreadContext(), + headers, + origin, + request, + listener, + (r, l) -> client.execute(action, r, l) + ); + } + + public static void executeWithHeadersAsync( + ThreadContext threadContext, + Map headers, + String origin, + Request request, + ActionListener listener, + BiConsumer> consumer ) { // No need to rewrite authentication header because it will be handled by Security Interceptor final Map filteredHeaders = filterSecurityHeaders(headers); - final ThreadContext threadContext = client.threadPool().getThreadContext(); // No headers (e.g. 
security not installed/in use) so execute as origin if (filteredHeaders.isEmpty()) { - ClientHelper.executeAsyncWithOrigin(client, origin, action, request, listener); + executeAsyncWithOrigin(threadContext, origin, request, listener, consumer); } else { // Otherwise stash the context and copy in the saved headers before executing final Supplier supplier = threadContext.newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithHeaders(threadContext, filteredHeaders)) { - client.execute(action, request, new ContextPreservingActionListener<>(supplier, listener)); + consumer.accept(request, new ContextPreservingActionListener<>(supplier, listener)); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 5960c4c6f79d2..31c772f96f889 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.ClusterSettings; @@ -381,6 +382,7 @@ protected Class> @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java index 
f5dbeddaf6ee1..3d46b2dd5070f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -63,11 +62,6 @@ public boolean shouldDeleteMetadata() { return deleteMetadata; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index eae1328e0d516..46bef2214de99 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -31,7 +32,7 @@ public class TransportXPackInfoAction extends HandledTransportAction infoActions; + private final List> infoActions; @SuppressWarnings("this-escape") @Inject @@ -48,7 +49,7 @@ public TransportXPackInfoAction( } // overrideable for tests - protected List infoActions() { + protected List> infoActions() { return XPackInfoFeatureAction.ALL; } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java index d67002fba8a7d..8eddfecf0c92c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -28,7 +29,7 @@ public class TransportXPackUsageAction extends TransportMasterNodeAction { private final NodeClient client; - private final List usageActions; + private final List> usageActions; @SuppressWarnings("this-escape") @Inject @@ -56,7 +57,7 @@ public TransportXPackUsageAction( } // overrideable for tests - protected List usageActions() { + protected List> usageActions() { return XPackUsageFeatureAction.ALL; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java index 8c47a7f517c42..1f3ba1de07e29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java @@ -7,14 +7,17 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.protocol.xpack.XPackInfoResponse; -public class XPackInfoAction extends ActionType { +public class XPackInfoAction { public static final String NAME = 
"cluster:monitor/xpack/info"; - public static final XPackInfoAction INSTANCE = new XPackInfoAction(); + public static final ActionType INSTANCE = new ActionType<>(NAME); + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + XPackInfoResponse::new + ); - public XPackInfoAction() { - super(NAME, XPackInfoResponse::new); - } + private XPackInfoAction() {/* no instances */} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java index 859950470f0e3..38b0d1a693e64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java @@ -9,9 +9,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.xpack.core.XPackField; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; /** @@ -21,79 +18,69 @@ * {@link XPackInfoAction} implementation iterates over the {@link #ALL} list of actions to form * the complete info result. 
*/ -public class XPackInfoFeatureAction extends ActionType { +public class XPackInfoFeatureAction { private static final String BASE_NAME = "cluster:monitor/xpack/info/"; - public static final XPackInfoFeatureAction SECURITY = new XPackInfoFeatureAction(XPackField.SECURITY); - public static final XPackInfoFeatureAction MONITORING = new XPackInfoFeatureAction(XPackField.MONITORING); - public static final XPackInfoFeatureAction WATCHER = new XPackInfoFeatureAction(XPackField.WATCHER); - public static final XPackInfoFeatureAction GRAPH = new XPackInfoFeatureAction(XPackField.GRAPH); - public static final XPackInfoFeatureAction MACHINE_LEARNING = new XPackInfoFeatureAction(XPackField.MACHINE_LEARNING); - public static final XPackInfoFeatureAction LOGSTASH = new XPackInfoFeatureAction(XPackField.LOGSTASH); - public static final XPackInfoFeatureAction EQL = new XPackInfoFeatureAction(XPackField.EQL); - public static final XPackInfoFeatureAction ESQL = new XPackInfoFeatureAction(XPackField.ESQL); - public static final XPackInfoFeatureAction SQL = new XPackInfoFeatureAction(XPackField.SQL); - public static final XPackInfoFeatureAction ROLLUP = new XPackInfoFeatureAction(XPackField.ROLLUP); - public static final XPackInfoFeatureAction INDEX_LIFECYCLE = new XPackInfoFeatureAction(XPackField.INDEX_LIFECYCLE); - public static final XPackInfoFeatureAction SNAPSHOT_LIFECYCLE = new XPackInfoFeatureAction(XPackField.SNAPSHOT_LIFECYCLE); - public static final XPackInfoFeatureAction CCR = new XPackInfoFeatureAction(XPackField.CCR); - public static final XPackInfoFeatureAction TRANSFORM = new XPackInfoFeatureAction(XPackField.TRANSFORM); - public static final XPackInfoFeatureAction VOTING_ONLY = new XPackInfoFeatureAction(XPackField.VOTING_ONLY); - public static final XPackInfoFeatureAction FROZEN_INDICES = new XPackInfoFeatureAction(XPackField.FROZEN_INDICES); - public static final XPackInfoFeatureAction SPATIAL = new XPackInfoFeatureAction(XPackField.SPATIAL); - public static final 
XPackInfoFeatureAction ANALYTICS = new XPackInfoFeatureAction(XPackField.ANALYTICS); - public static final XPackInfoFeatureAction ENRICH = new XPackInfoFeatureAction(XPackField.ENRICH); - public static final XPackInfoFeatureAction SEARCHABLE_SNAPSHOTS = new XPackInfoFeatureAction(XPackField.SEARCHABLE_SNAPSHOTS); - public static final XPackInfoFeatureAction DATA_STREAMS = new XPackInfoFeatureAction(XPackField.DATA_STREAMS); - public static final XPackInfoFeatureAction DATA_TIERS = new XPackInfoFeatureAction(XPackField.DATA_TIERS); - public static final XPackInfoFeatureAction AGGREGATE_METRIC = new XPackInfoFeatureAction(XPackField.AGGREGATE_METRIC); - public static final XPackInfoFeatureAction ARCHIVE = new XPackInfoFeatureAction(XPackField.ARCHIVE); - public static final XPackInfoFeatureAction ENTERPRISE_SEARCH = new XPackInfoFeatureAction(XPackField.ENTERPRISE_SEARCH); - public static final XPackInfoFeatureAction UNIVERSAL_PROFILING = new XPackInfoFeatureAction(XPackField.UNIVERSAL_PROFILING); + public static final ActionType SECURITY = xpackInfoFeatureAction(XPackField.SECURITY); + public static final ActionType MONITORING = xpackInfoFeatureAction(XPackField.MONITORING); + public static final ActionType WATCHER = xpackInfoFeatureAction(XPackField.WATCHER); + public static final ActionType GRAPH = xpackInfoFeatureAction(XPackField.GRAPH); + public static final ActionType MACHINE_LEARNING = xpackInfoFeatureAction(XPackField.MACHINE_LEARNING); + public static final ActionType LOGSTASH = xpackInfoFeatureAction(XPackField.LOGSTASH); + public static final ActionType EQL = xpackInfoFeatureAction(XPackField.EQL); + public static final ActionType ESQL = xpackInfoFeatureAction(XPackField.ESQL); + public static final ActionType SQL = xpackInfoFeatureAction(XPackField.SQL); + public static final ActionType ROLLUP = xpackInfoFeatureAction(XPackField.ROLLUP); + public static final ActionType INDEX_LIFECYCLE = xpackInfoFeatureAction(XPackField.INDEX_LIFECYCLE); + public static 
final ActionType SNAPSHOT_LIFECYCLE = xpackInfoFeatureAction(XPackField.SNAPSHOT_LIFECYCLE); + public static final ActionType CCR = xpackInfoFeatureAction(XPackField.CCR); + public static final ActionType TRANSFORM = xpackInfoFeatureAction(XPackField.TRANSFORM); + public static final ActionType VOTING_ONLY = xpackInfoFeatureAction(XPackField.VOTING_ONLY); + public static final ActionType FROZEN_INDICES = xpackInfoFeatureAction(XPackField.FROZEN_INDICES); + public static final ActionType SPATIAL = xpackInfoFeatureAction(XPackField.SPATIAL); + public static final ActionType ANALYTICS = xpackInfoFeatureAction(XPackField.ANALYTICS); + public static final ActionType ENRICH = xpackInfoFeatureAction(XPackField.ENRICH); + public static final ActionType SEARCHABLE_SNAPSHOTS = xpackInfoFeatureAction(XPackField.SEARCHABLE_SNAPSHOTS); + public static final ActionType DATA_STREAMS = xpackInfoFeatureAction(XPackField.DATA_STREAMS); + public static final ActionType DATA_TIERS = xpackInfoFeatureAction(XPackField.DATA_TIERS); + public static final ActionType AGGREGATE_METRIC = xpackInfoFeatureAction(XPackField.AGGREGATE_METRIC); + public static final ActionType ARCHIVE = xpackInfoFeatureAction(XPackField.ARCHIVE); + public static final ActionType ENTERPRISE_SEARCH = xpackInfoFeatureAction(XPackField.ENTERPRISE_SEARCH); + public static final ActionType UNIVERSAL_PROFILING = xpackInfoFeatureAction(XPackField.UNIVERSAL_PROFILING); - public static final List ALL; - static { - final List actions = new ArrayList<>(); - actions.addAll( - Arrays.asList( - SECURITY, - MONITORING, - WATCHER, - GRAPH, - MACHINE_LEARNING, - LOGSTASH, - EQL, - ESQL, - SQL, - ROLLUP, - INDEX_LIFECYCLE, - SNAPSHOT_LIFECYCLE, - CCR, - TRANSFORM, - VOTING_ONLY, - FROZEN_INDICES, - SPATIAL, - ANALYTICS, - ENRICH, - DATA_STREAMS, - SEARCHABLE_SNAPSHOTS, - DATA_TIERS, - AGGREGATE_METRIC, - ARCHIVE, - ENTERPRISE_SEARCH, - UNIVERSAL_PROFILING - ) - ); - ALL = Collections.unmodifiableList(actions); - } + public static 
final List> ALL = List.of( + SECURITY, + MONITORING, + WATCHER, + GRAPH, + MACHINE_LEARNING, + LOGSTASH, + EQL, + ESQL, + SQL, + ROLLUP, + INDEX_LIFECYCLE, + SNAPSHOT_LIFECYCLE, + CCR, + TRANSFORM, + VOTING_ONLY, + FROZEN_INDICES, + SPATIAL, + ANALYTICS, + ENRICH, + DATA_STREAMS, + SEARCHABLE_SNAPSHOTS, + DATA_TIERS, + AGGREGATE_METRIC, + ARCHIVE, + ENTERPRISE_SEARCH, + UNIVERSAL_PROFILING + ); - private XPackInfoFeatureAction(String name) { - super(BASE_NAME + name, XPackInfoFeatureResponse::new); + public static ActionType xpackInfoFeatureAction(String suffix) { + return new ActionType<>(BASE_NAME + suffix); } - @Override - public String toString() { - return "ActionType [" + name() + "]"; - } + private XPackInfoFeatureAction() {/* no instances */} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java index e70118ca7578f..059278b582824 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java @@ -8,12 +8,10 @@ import org.elasticsearch.action.ActionType; -public class XPackUsageAction extends ActionType { +public class XPackUsageAction { public static final String NAME = "cluster:monitor/xpack/usage"; - public static final XPackUsageAction INSTANCE = new XPackUsageAction(); + public static final ActionType INSTANCE = new ActionType<>(NAME); - public XPackUsageAction() { - super(NAME, XPackUsageResponse::new); - } + private XPackUsageAction() {/* no instances */} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index c0e6d96c1569a..b8ca43e46ee29 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -18,42 +18,48 @@ * {@link XPackUsageAction} implementation iterates over the {@link #ALL} list of actions to form * the complete usage result. */ -public class XPackUsageFeatureAction extends ActionType { +public final class XPackUsageFeatureAction { + + private XPackUsageFeatureAction() {/* no instances */} private static final String BASE_NAME = "cluster:monitor/xpack/usage/"; - public static final XPackUsageFeatureAction SECURITY = new XPackUsageFeatureAction(XPackField.SECURITY); - public static final XPackUsageFeatureAction MONITORING = new XPackUsageFeatureAction(XPackField.MONITORING); - public static final XPackUsageFeatureAction WATCHER = new XPackUsageFeatureAction(XPackField.WATCHER); - public static final XPackUsageFeatureAction GRAPH = new XPackUsageFeatureAction(XPackField.GRAPH); - public static final XPackUsageFeatureAction MACHINE_LEARNING = new XPackUsageFeatureAction(XPackField.MACHINE_LEARNING); - public static final XPackUsageFeatureAction INFERENCE = new XPackUsageFeatureAction(XPackField.INFERENCE); - public static final XPackUsageFeatureAction LOGSTASH = new XPackUsageFeatureAction(XPackField.LOGSTASH); - public static final XPackUsageFeatureAction EQL = new XPackUsageFeatureAction(XPackField.EQL); - public static final XPackUsageFeatureAction ESQL = new XPackUsageFeatureAction(XPackField.ESQL); - public static final XPackUsageFeatureAction SQL = new XPackUsageFeatureAction(XPackField.SQL); - public static final XPackUsageFeatureAction ROLLUP = new XPackUsageFeatureAction(XPackField.ROLLUP); - public static final XPackUsageFeatureAction INDEX_LIFECYCLE = new XPackUsageFeatureAction(XPackField.INDEX_LIFECYCLE); - public static final XPackUsageFeatureAction SNAPSHOT_LIFECYCLE = new XPackUsageFeatureAction(XPackField.SNAPSHOT_LIFECYCLE); - 
public static final XPackUsageFeatureAction CCR = new XPackUsageFeatureAction(XPackField.CCR); - public static final XPackUsageFeatureAction TRANSFORM = new XPackUsageFeatureAction(XPackField.TRANSFORM); - public static final XPackUsageFeatureAction VOTING_ONLY = new XPackUsageFeatureAction(XPackField.VOTING_ONLY); - public static final XPackUsageFeatureAction FROZEN_INDICES = new XPackUsageFeatureAction(XPackField.FROZEN_INDICES); - public static final XPackUsageFeatureAction SPATIAL = new XPackUsageFeatureAction(XPackField.SPATIAL); - public static final XPackUsageFeatureAction ANALYTICS = new XPackUsageFeatureAction(XPackField.ANALYTICS); - public static final XPackUsageFeatureAction ENRICH = new XPackUsageFeatureAction(XPackField.ENRICH); - public static final XPackUsageFeatureAction SEARCHABLE_SNAPSHOTS = new XPackUsageFeatureAction(XPackField.SEARCHABLE_SNAPSHOTS); - public static final XPackUsageFeatureAction DATA_STREAMS = new XPackUsageFeatureAction(XPackField.DATA_STREAMS); - public static final XPackUsageFeatureAction DATA_STREAM_LIFECYCLE = new XPackUsageFeatureAction(XPackField.DATA_STREAM_LIFECYCLE); - public static final XPackUsageFeatureAction DATA_TIERS = new XPackUsageFeatureAction(XPackField.DATA_TIERS); - public static final XPackUsageFeatureAction AGGREGATE_METRIC = new XPackUsageFeatureAction(XPackField.AGGREGATE_METRIC); - public static final XPackUsageFeatureAction ARCHIVE = new XPackUsageFeatureAction(XPackField.ARCHIVE); - public static final XPackUsageFeatureAction HEALTH = new XPackUsageFeatureAction(XPackField.HEALTH_API); - public static final XPackUsageFeatureAction REMOTE_CLUSTERS = new XPackUsageFeatureAction(XPackField.REMOTE_CLUSTERS); - public static final XPackUsageFeatureAction ENTERPRISE_SEARCH = new XPackUsageFeatureAction(XPackField.ENTERPRISE_SEARCH); - public static final XPackUsageFeatureAction UNIVERSAL_PROFILING = new XPackUsageFeatureAction(XPackField.UNIVERSAL_PROFILING); + public static final ActionType SECURITY = 
xpackUsageFeatureAction(XPackField.SECURITY); + public static final ActionType MONITORING = xpackUsageFeatureAction(XPackField.MONITORING); + public static final ActionType WATCHER = xpackUsageFeatureAction(XPackField.WATCHER); + public static final ActionType GRAPH = xpackUsageFeatureAction(XPackField.GRAPH); + public static final ActionType MACHINE_LEARNING = xpackUsageFeatureAction(XPackField.MACHINE_LEARNING); + public static final ActionType INFERENCE = xpackUsageFeatureAction(XPackField.INFERENCE); + public static final ActionType LOGSTASH = xpackUsageFeatureAction(XPackField.LOGSTASH); + public static final ActionType EQL = xpackUsageFeatureAction(XPackField.EQL); + public static final ActionType ESQL = xpackUsageFeatureAction(XPackField.ESQL); + public static final ActionType SQL = xpackUsageFeatureAction(XPackField.SQL); + public static final ActionType ROLLUP = xpackUsageFeatureAction(XPackField.ROLLUP); + public static final ActionType INDEX_LIFECYCLE = xpackUsageFeatureAction(XPackField.INDEX_LIFECYCLE); + public static final ActionType SNAPSHOT_LIFECYCLE = xpackUsageFeatureAction(XPackField.SNAPSHOT_LIFECYCLE); + public static final ActionType CCR = xpackUsageFeatureAction(XPackField.CCR); + public static final ActionType TRANSFORM = xpackUsageFeatureAction(XPackField.TRANSFORM); + public static final ActionType VOTING_ONLY = xpackUsageFeatureAction(XPackField.VOTING_ONLY); + public static final ActionType FROZEN_INDICES = xpackUsageFeatureAction(XPackField.FROZEN_INDICES); + public static final ActionType SPATIAL = xpackUsageFeatureAction(XPackField.SPATIAL); + public static final ActionType ANALYTICS = xpackUsageFeatureAction(XPackField.ANALYTICS); + public static final ActionType ENRICH = xpackUsageFeatureAction(XPackField.ENRICH); + public static final ActionType SEARCHABLE_SNAPSHOTS = xpackUsageFeatureAction( + XPackField.SEARCHABLE_SNAPSHOTS + ); + public static final ActionType DATA_STREAMS = xpackUsageFeatureAction(XPackField.DATA_STREAMS); + 
public static final ActionType DATA_STREAM_LIFECYCLE = xpackUsageFeatureAction( + XPackField.DATA_STREAM_LIFECYCLE + ); + public static final ActionType DATA_TIERS = xpackUsageFeatureAction(XPackField.DATA_TIERS); + public static final ActionType AGGREGATE_METRIC = xpackUsageFeatureAction(XPackField.AGGREGATE_METRIC); + public static final ActionType ARCHIVE = xpackUsageFeatureAction(XPackField.ARCHIVE); + public static final ActionType HEALTH = xpackUsageFeatureAction(XPackField.HEALTH_API); + public static final ActionType REMOTE_CLUSTERS = xpackUsageFeatureAction(XPackField.REMOTE_CLUSTERS); + public static final ActionType ENTERPRISE_SEARCH = xpackUsageFeatureAction(XPackField.ENTERPRISE_SEARCH); + public static final ActionType UNIVERSAL_PROFILING = xpackUsageFeatureAction(XPackField.UNIVERSAL_PROFILING); - static final List ALL = List.of( + static final List> ALL = List.of( AGGREGATE_METRIC, ANALYTICS, CCR, @@ -85,13 +91,7 @@ public class XPackUsageFeatureAction extends ActionType xpackUsageFeatureAction(String suffix) { + return new ActionType<>(BASE_NAME + suffix); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java index 67a21aa324b48..586033e4c8f41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.action; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.protocol.xpack.XPackUsageRequest; @@ -19,7 +20,7 @@ public XPackUsageRequestBuilder(ElasticsearchClient client) { this(client, XPackUsageAction.INSTANCE); } - 
public XPackUsageRequestBuilder(ElasticsearchClient client, XPackUsageAction action) { + public XPackUsageRequestBuilder(ElasticsearchClient client, ActionType action) { super(client, action, new XPackUsageRequest()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java index 48a248d075ca5..14fb7c7bf37fc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java @@ -33,7 +33,7 @@ public class AnalyticsStatsAction extends ActionType { - public static final ActionType TYPE = ActionType.localOnly("indices:data/read/async_search/delete"); + public static final ActionType TYPE = new ActionType<>("indices:data/read/async_search/delete"); private final DeleteAsyncResultsService deleteResultsService; private final ClusterService clusterService; private final TransportService transportService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java index 1c0204618bd45..300d2844b7a2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java @@ -25,7 +25,7 @@ public class ActivateAutoFollowPatternAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java index 225c784ef1431..b6c078134634b 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java @@ -29,7 +29,7 @@ public class CcrStatsAction extends ActionType { public static final CcrStatsAction INSTANCE = new CcrStatsAction(); private CcrStatsAction() { - super(NAME, CcrStatsAction.Response::new); + super(NAME); } public static class Request extends MasterNodeRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java index 44c815ac986db..8e7e9f8605245 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java @@ -24,7 +24,7 @@ public class DeleteAutoFollowPatternAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java index b6e771e07ac46..c405e4e81ff19 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java @@ -34,7 +34,7 @@ public class FollowInfoAction extends ActionType { public static final FollowInfoAction INSTANCE = new FollowInfoAction(); private FollowInfoAction() { - super(NAME, FollowInfoAction.Response::new); + super(NAME); } public static class Request extends MasterNodeReadRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java index 726257910c9a5..4dde222245334 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java @@ -42,7 +42,7 @@ public class FollowStatsAction extends ActionType innerToXContentChunked(Map Iterators.concat( Iterators.single( - (builder, params) -> builder.startObject().field("index", indexEntry.getKey()).startArray("shards") + (builder, params) -> builder.startObject() + .field("index", indexEntry.getKey()) + .field("total_global_checkpoint_lag", calcFollowerToLeaderLaggingOps(indexEntry.getValue())) + .startArray("shards") ), indexEntry.getValue().values().iterator(), Iterators.single((builder, params) -> builder.endArray().endObject()) @@ -102,6 +105,14 @@ private static Iterator innerToXContentChunked(Map followShardTaskStats) { + return followShardTaskStats.values() + .stream() + .map(StatsResponse::status) + .mapToLong(s -> s.leaderGlobalCheckpoint() - s.followerGlobalCheckpoint()) + .sum(); + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java index ed36f3cef5f52..3a3f9f1f8e00f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java @@ -25,7 +25,7 @@ public class ForgetFollowerAction extends ActionType { public static final ForgetFollowerAction INSTANCE = new ForgetFollowerAction(); private ForgetFollowerAction() { - super(NAME, BroadcastResponse::new); + super(NAME); } /** diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java index 8a327735052b5..70f4f256c87e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java @@ -27,7 +27,7 @@ public class GetAutoFollowPatternAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java index c38cb391d7cc5..7ad8e5881e443 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java @@ -23,7 +23,7 @@ public class PauseFollowAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ccr/pause_follow"; private PauseFollowAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends MasterNodeRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 8154067e72b18..92902aa9962ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -37,7 +37,7 @@ public class PutAutoFollowPatternAction extends ActionType private static final int MAX_NAME_BYTES = 255; private PutAutoFollowPatternAction() { - super(NAME, 
AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index fe5a224f85e00..c2e1048541a47 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -37,7 +37,7 @@ public final class PutFollowAction extends ActionType public static final String NAME = "indices:admin/xpack/ccr/put_follow"; private PutFollowAction() { - super(NAME, PutFollowAction.Response::new); + super(NAME); } public static final class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index 0e8c95bde2bba..4cd84733b19e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -29,7 +29,7 @@ public final class ResumeFollowAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ccr/resume_follow"; private ResumeFollowAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends MasterNodeRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java index 630496c822050..808df5f8bccb0 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java @@ -26,7 +26,7 @@ public class UnfollowAction extends ActionType { public static final String NAME = "indices:admin/xpack/ccr/unfollow"; private UnfollowAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements IndicesRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java index edac3498ca4e4..91cce4126d3a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java @@ -48,7 +48,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_020; + return TransportVersions.V_8_9_X; } @Override @@ -112,7 +112,7 @@ public LifecycleStats( } public static LifecycleStats read(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { return new LifecycleStats(in.readVLong(), in.readVLong(), in.readVLong(), in.readDouble(), in.readBoolean()); } else { return INITIAL; @@ -121,7 +121,7 @@ public static LifecycleStats read(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { 
out.writeVLong(dataStreamsWithLifecyclesCount); out.writeVLong(minRetentionMillis); out.writeVLong(maxRetentionMillis); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java index 06a3b47d47a65..08a2d5ae4f5b4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java @@ -55,7 +55,7 @@ public class NodesDataTiersUsageTransportAction extends TransportNodesAction< NodesDataTiersUsageTransportAction.NodeRequest, NodeDataTiersUsage> { - public static final ActionType TYPE = ActionType.localOnly("cluster:monitor/nodes/data_tier_usage"); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/data_tier_usage"); public static final NodeFeature LOCALLY_PRECALCULATED_STATS_FEATURE = new NodeFeature("usage.data_tiers.precalculate_stats"); private static final CommonStatsFlags STATS_FLAGS = new CommonStatsFlags().clear() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java index ef93ab914f08f..708a746d61414 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java @@ -37,7 +37,7 @@ public class DownsampleIndexerAction extends ActionType implements IndicesRequest, ToXContentObject { @@ -69,7 +69,7 @@ public Request() {} public Request(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) && 
in.readBoolean()) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) && in.readBoolean()) { this.indexStartTimeMillis = in.readVLong(); this.indexEndTimeMillis = in.readVLong(); } else { @@ -132,7 +132,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(true); out.writeVLong(indexStartTimeMillis); out.writeVLong(indexEndTimeMillis); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java index 2700ed844d063..8d1d4aec6e7c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java @@ -144,7 +144,7 @@ public DownsampleShardStatus(StreamInput in) throws IOException { numSent = in.readLong(); numIndexed = in.readLong(); numFailed = in.readLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) && in.readBoolean()) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) && in.readBoolean()) { totalShardDocCount = in.readVLong(); lastSourceTimestamp = in.readVLong(); lastTargetTimestamp = in.readVLong(); @@ -254,7 +254,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(numSent); out.writeLong(numIndexed); out.writeLong(numFailed); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(true); out.writeVLong(totalShardDocCount); out.writeVLong(lastSourceTimestamp); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java index f7571c115ec38..e444232291101 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java @@ -22,7 +22,7 @@ public class DeleteEnrichPolicyAction extends ActionType { public static final String NAME = "cluster:admin/xpack/enrich/delete"; private DeleteEnrichPolicyAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends MasterNodeRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java index 4ece0f1a62e2a..c0d96347ccae4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java @@ -28,7 +28,7 @@ public class EnrichStatsAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/enrich/stats"; private EnrichStatsAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends MasterNodeRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java index ba802f9bf9ea8..779ea535f74d9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java @@ -25,7 +25,7 @@ public class ExecuteEnrichPolicyAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java index d7428ce2e4a26..ef8229b407b56 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java @@ -31,7 +31,7 @@ public class GetEnrichPolicyAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java index ec1b04e453bb5..4ebbb75239879 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java @@ -24,7 +24,7 @@ public class PutEnrichPolicyAction extends ActionType { public static final String NAME = "cluster:admin/xpack/enrich/put"; private PutEnrichPolicyAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static Request fromXContent(XContentParser parser, String name) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlFeatureSetUsage.java index c5c5ff708c0b7..5707bc054e58f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlFeatureSetUsage.java @@ 
-63,7 +63,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_062; + return TransportVersions.V_8_11_X; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java index 5fa759268555a..d4d76200c25be 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java @@ -15,6 +15,6 @@ public class FreezeIndexAction extends ActionType { public static final String NAME = "indices:admin/freeze"; private FreezeIndexAction() { - super(NAME, FreezeResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java index 9a1b01a86ed86..9242d2ea5ce83 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java @@ -15,6 +15,6 @@ public class GraphExploreAction extends ActionType { public static final String NAME = "indices:data/read/xpack/graph/explore"; private GraphExploreAction() { - super(NAME, GraphExploreResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java index 818b45c2b5d00..59ff38b317327 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java @@ -91,7 +91,7 @@ public DownsampleAction(final DateHistogramInterval fixedInterval, final TimeVal public DownsampleAction(StreamInput in) throws IOException { this( new DateHistogramInterval(in), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) ? TimeValue.parseTimeValue(in.readString(), WAIT_TIMEOUT_FIELD.getPreferredName()) : DEFAULT_WAIT_TIMEOUT ); @@ -100,7 +100,7 @@ public DownsampleAction(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { fixedInterval.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeString(waitTimeout.getStringRep()); } else { out.writeString(DEFAULT_WAIT_TIMEOUT.getStringRep()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java index c36d73e8d12d6..1072e6ee4c899 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java @@ -18,8 +18,6 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.core.Nullable; -import java.util.Objects; - import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentILMMode; import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentSLMMode; @@ -157,23 +155,4 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) logger.info("SLM operation mode updated to {}", slmMode); } } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), ilmMode, slmMode); - } - - 
@Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj.getClass() != getClass()) { - return false; - } - OperationModeUpdateTask other = (OperationModeUpdateTask) obj; - return Objects.equals(priority(), other.priority()) - && Objects.equals(ilmMode, other.ilmMode) - && Objects.equals(slmMode, other.slmMode); - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StartILMRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StartILMRequest.java index 03d647e867698..99fdbc3786614 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StartILMRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StartILMRequest.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; @@ -22,11 +21,6 @@ public StartILMRequest(StreamInput in) throws IOException { public StartILMRequest() {} - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return 64; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StopILMRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StopILMRequest.java index 7588c209a0d44..6118f02690082 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StopILMRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StopILMRequest.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; @@ -22,11 +21,6 @@ public StopILMRequest(StreamInput in) throws 
IOException { public StopILMRequest() {} - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return 75; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java index f3d47cea1f39f..acb36bd015e4b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java @@ -10,8 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.rollover.Condition; -import org.elasticsearch.action.admin.indices.rollover.MaxPrimaryShardDocsCondition; import org.elasticsearch.action.admin.indices.rollover.RolloverConditions; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.client.internal.Client; @@ -25,9 +23,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.xpack.core.ilm.step.info.EmptyInfo; -import java.util.HashMap; import java.util.Locale; -import java.util.Map; import java.util.Objects; /** @@ -233,20 +229,28 @@ public void evaluateCondition(Metadata metadata, Index index, Listener listener, RolloverRequest createRolloverRequest(String rolloverTarget, TimeValue masterTimeout, boolean rolloverOnlyIfHasDocuments) { RolloverRequest rolloverRequest = new RolloverRequest(rolloverTarget, null).masterNodeTimeout(masterTimeout); rolloverRequest.dryRun(true); + rolloverRequest.setConditions(applyDefaultConditions(conditions, rolloverOnlyIfHasDocuments)); + return rolloverRequest; + } + + /** + * Apply default conditions to the set of user-defined conditions. 
+ * + * @param conditions the existing conditions + * @param rolloverOnlyIfHasDocuments whether to inject a min_docs 1 condition if there is not already a min_docs + * (or min_primary_shard_docs) condition + * @return the rollover conditions with the default conditions applied. + */ + public static RolloverConditions applyDefaultConditions(RolloverConditions conditions, boolean rolloverOnlyIfHasDocuments) { + var builder = RolloverConditions.newBuilder(conditions); if (rolloverOnlyIfHasDocuments && (conditions.getMinDocs() == null && conditions.getMinPrimaryShardDocs() == null)) { - rolloverRequest.setConditions(RolloverConditions.newBuilder(conditions).addMinIndexDocsCondition(1L).build()); - } else { - rolloverRequest.setConditions(conditions); + builder.addMinIndexDocsCondition(1L); } - long currentMaxPrimaryShardDocs = rolloverRequest.getConditions().getMaxPrimaryShardDocs() != null - ? rolloverRequest.getConditions().getMaxPrimaryShardDocs() + long currentMaxPrimaryShardDocs = conditions.getMaxPrimaryShardDocs() != null + ? 
conditions.getMaxPrimaryShardDocs() : Long.MAX_VALUE; - if (currentMaxPrimaryShardDocs > MAX_PRIMARY_SHARD_DOCS) { - Map> conditions = new HashMap<>(rolloverRequest.getConditions().getConditions()); - conditions.put(MaxPrimaryShardDocsCondition.NAME, new MaxPrimaryShardDocsCondition(MAX_PRIMARY_SHARD_DOCS)); - rolloverRequest.setConditions(new RolloverConditions(conditions)); - } - return rolloverRequest; + builder.addMaxPrimaryShardDocsCondition(Math.min(currentMaxPrimaryShardDocs, MAX_PRIMARY_SHARD_DOCS)); + return builder.build(); } public RolloverConditions getConditions() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/DeleteLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/DeleteLifecycleAction.java index 2d59d07a55d0d..4e022f2cf1394 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/DeleteLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/DeleteLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ilm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -25,7 +24,7 @@ public class DeleteLifecycleAction extends ActionType { public static final String NAME = "cluster:admin/ilm/delete"; protected DeleteLifecycleAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -49,11 +48,6 @@ public String getPolicyName() { return policyName; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ExplainLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ExplainLifecycleAction.java index aef79d89c68c4..b3c23ca3798b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ExplainLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ExplainLifecycleAction.java @@ -15,7 +15,7 @@ public class ExplainLifecycleAction extends ActionType public static final String NAME = "indices:admin/ilm/explain"; protected ExplainLifecycleAction() { - super(NAME, ExplainLifecycleResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java index ffa8cadee77b2..97d1fbf524963 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ilm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -36,7 +35,7 @@ public class GetLifecycleAction extends ActionType public static final String NAME = "cluster:admin/ilm/get"; protected GetLifecycleAction() { - super(NAME, GetLifecycleAction.Response::new); + super(NAME); } public static class Response extends ActionResponse implements ChunkedToXContentObject { @@ -129,11 +128,6 @@ public String[] getPolicyNames() { return policyNames; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { 
super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java index 5f16b88e212ef..f70510de382a9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.core.ilm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -26,7 +24,7 @@ public class GetStatusAction extends ActionType { public static final String NAME = "cluster:admin/ilm/operation_mode/get"; protected GetStatusAction() { - super(NAME, GetStatusAction.Response::new); + super(NAME); } public static class Response extends ActionResponse implements ToXContentObject { @@ -82,23 +80,4 @@ public String toString() { } } - - public static class Request extends AcknowledgedRequest { - - public Request(StreamInput in) throws IOException { - super(in); - } - - public Request() {} - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ILMActions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ILMActions.java index ed3c88ef86be1..fb76cc80a0e43 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ILMActions.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/ILMActions.java @@ -11,9 +11,9 @@ public enum ILMActions { ; - public static final ActionType START = ActionType.localOnly("cluster:admin/ilm/start"); - public static final ActionType STOP = ActionType.localOnly("cluster:admin/ilm/stop"); - public static final ActionType RETRY = ActionType.localOnly("indices:admin/ilm/retry"); - public static final ActionType MOVE_TO_STEP = ActionType.localOnly("cluster:admin/ilm/_move/post"); - public static final ActionType PUT = ActionType.localOnly("cluster:admin/ilm/put"); + public static final ActionType START = new ActionType<>("cluster:admin/ilm/start"); + public static final ActionType STOP = new ActionType<>("cluster:admin/ilm/stop"); + public static final ActionType RETRY = new ActionType<>("indices:admin/ilm/retry"); + public static final ActionType MOVE_TO_STEP = new ActionType<>("cluster:admin/ilm/_move/post"); + public static final ActionType PUT = new ActionType<>("cluster:admin/ilm/put"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java index 4fe4861ce3455..68537fba3bfd1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ilm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; @@ -30,7 +29,7 @@ public class RemoveIndexLifecyclePolicyAction extends ActionType public static final String NAME = "cluster:admin/xpack/inference/delete"; public 
DeleteInferenceModelAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { - private final String modelId; + private final String inferenceEntityId; private final TaskType taskType; - public Request(String modelId, String taskType) { - this.modelId = modelId; + public Request(String inferenceEntityId, String taskType) { + this.inferenceEntityId = inferenceEntityId; this.taskType = TaskType.fromStringOrStatusException(taskType); } public Request(StreamInput in) throws IOException { super(in); - this.modelId = in.readString(); + this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); } - @Override - public ActionRequestValidationException validate() { - return null; - } - - public String getModelId() { - return modelId; + public String getInferenceEntityId() { + return inferenceEntityId; } public TaskType getTaskType() { @@ -59,7 +53,7 @@ public TaskType getTaskType() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(modelId); + out.writeString(inferenceEntityId); taskType.writeTo(out); } @@ -68,12 +62,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteInferenceModelAction.Request request = (DeleteInferenceModelAction.Request) o; - return Objects.equals(modelId, request.modelId) && taskType == request.taskType; + return Objects.equals(inferenceEntityId, request.inferenceEntityId) && taskType == request.taskType; } @Override public int hashCode() { - return Objects.hash(modelId, taskType); + return Objects.hash(inferenceEntityId, taskType); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index 0343206994d2c..3fb753c1af2dd 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.inference.action; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -30,32 +29,27 @@ public class GetInferenceModelAction extends ActionType { - private final String modelId; + private final String inferenceEntityId; private final TaskType taskType; - public Request(String modelId, TaskType taskType) { - this.modelId = Objects.requireNonNull(modelId); + public Request(String inferenceEntityId, TaskType taskType) { + this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); this.taskType = Objects.requireNonNull(taskType); } public Request(StreamInput in) throws IOException { super(in); - this.modelId = in.readString(); + this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); } - @Override - public ActionRequestValidationException validate() { - return null; - } - - public String getModelId() { - return modelId; + public String getInferenceEntityId() { + return inferenceEntityId; } public TaskType getTaskType() { @@ -65,7 +59,7 @@ public TaskType getTaskType() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(modelId); + out.writeString(inferenceEntityId); taskType.writeTo(out); } @@ -74,12 +68,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(modelId, request.modelId) && taskType == request.taskType; + return Objects.equals(inferenceEntityId, 
request.inferenceEntityId) && taskType == request.taskType; } @Override public int hashCode() { - return Objects.hash(modelId, taskType); + return Objects.hash(inferenceEntityId, taskType); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index 732bc3d66bedc..1fc477927d7b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -41,7 +41,7 @@ public class InferenceAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/inference"; public InferenceAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest { @@ -56,22 +56,28 @@ public static class Request extends ActionRequest { PARSER.declareObject(Request.Builder::setTaskSettings, (p, c) -> p.mapOrdered(), TASK_SETTINGS); } - public static Request parseRequest(String modelId, String taskType, XContentParser parser) { + public static Request parseRequest(String inferenceEntityId, String taskType, XContentParser parser) { Request.Builder builder = PARSER.apply(parser, null); - builder.setModelId(modelId); + builder.setInferenceEntityId(inferenceEntityId); builder.setTaskType(taskType); return builder.build(); } private final TaskType taskType; - private final String modelId; + private final String inferenceEntityId; private final List input; private final Map taskSettings; private final InputType inputType; - public Request(TaskType taskType, String modelId, List input, Map taskSettings, InputType inputType) { + public Request( + TaskType taskType, + String inferenceEntityId, + List input, + Map taskSettings, + InputType inputType + ) { this.taskType = taskType; - this.modelId = modelId; + 
this.inferenceEntityId = inferenceEntityId; this.input = input; this.taskSettings = taskSettings; this.inputType = inputType; @@ -80,7 +86,7 @@ public Request(TaskType taskType, String modelId, List input, Map getInput() { @@ -133,7 +139,7 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); taskType.writeTo(out); - out.writeString(modelId); + out.writeString(inferenceEntityId); if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_MULTIPLE_INPUTS)) { out.writeStringCollection(input); } else { @@ -141,7 +147,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeGenericMap(taskSettings); if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { - inputType.writeTo(out); + out.writeEnum(inputType); } } @@ -151,7 +157,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; return taskType == request.taskType - && Objects.equals(modelId, request.modelId) + && Objects.equals(inferenceEntityId, request.inferenceEntityId) && Objects.equals(input, request.input) && Objects.equals(taskSettings, request.taskSettings) && Objects.equals(inputType, request.inputType); @@ -159,20 +165,20 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(taskType, modelId, input, taskSettings, inputType); + return Objects.hash(taskType, inferenceEntityId, input, taskSettings, inputType); } public static class Builder { private TaskType taskType; - private String modelId; + private String inferenceEntityId; private List input; private Map taskSettings = Map.of(); private Builder() {} - public Builder setModelId(String modelId) { - this.modelId = Objects.requireNonNull(modelId); + public Builder setInferenceEntityId(String inferenceEntityId) { + this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); return this; } 
@@ -197,7 +203,7 @@ public Builder setTaskSettings(Map taskSettings) { } public Request build() { - return new Request(taskType, modelId, input, taskSettings, InputType.INGEST); + return new Request(taskType, inferenceEntityId, input, taskSettings, InputType.INGEST); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index d09b96f897e06..170bff5a1908a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -32,26 +32,26 @@ public class PutInferenceModelAction extends ActionType { private final TaskType taskType; - private final String modelId; + private final String inferenceEntityId; private final BytesReference content; private final XContentType contentType; - public Request(String taskType, String modelId, BytesReference content, XContentType contentType) { + public Request(String taskType, String inferenceEntityId, BytesReference content, XContentType contentType) { this.taskType = TaskType.fromStringOrStatusException(taskType); - this.modelId = modelId; + this.inferenceEntityId = inferenceEntityId; this.content = content; this.contentType = contentType; } public Request(StreamInput in) throws IOException { super(in); - this.modelId = in.readString(); + this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); this.content = in.readBytesReference(); this.contentType = in.readEnum(XContentType.class); @@ -61,8 +61,8 @@ public TaskType getTaskType() { return taskType; } - public String getModelId() { - return modelId; + public String getInferenceEntityId() { + return inferenceEntityId; } public BytesReference getContent() { @@ -76,7 +76,7 @@ public XContentType 
getContentType() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(modelId); + out.writeString(inferenceEntityId); taskType.writeTo(out); out.writeBytesReference(content); XContentHelper.writeTo(out, contentType); @@ -85,8 +85,8 @@ public void writeTo(StreamOutput out) throws IOException { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = new ActionRequestValidationException(); - if (MlStrings.isValidId(this.modelId) == false) { - validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "model_id", this.modelId)); + if (MlStrings.isValidId(this.inferenceEntityId) == false) { + validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "model_id", this.inferenceEntityId)); } if (validationException.validationErrors().isEmpty() == false) { @@ -102,14 +102,14 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; return taskType == request.taskType - && Objects.equals(modelId, request.modelId) + && Objects.equals(inferenceEntityId, request.inferenceEntityId) && Objects.equals(content, request.content) && contentType == request.contentType; } @Override public int hashCode() { - return Objects.hash(taskType, modelId, content, contentType); + return Objects.hash(taskType, inferenceEntityId, content, contentType); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingInt.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingInt.java new file mode 100644 index 0000000000000..05fc8a3cef1b6 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingInt.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +public interface EmbeddingInt { + int getSize(); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbedding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbedding.java new file mode 100644 index 0000000000000..a185c2938223e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbedding.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +public interface TextEmbedding { + + /** + * Returns the first text embedding entry in the result list's array size. + * @return the size of the text embedding + * @throws IllegalStateException if the list of embeddings is empty + */ + int getFirstEmbeddingSize() throws IllegalStateException; +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java new file mode 100644 index 0000000000000..4ffef36359589 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Writes a text embedding result in the follow json format + * { + * "text_embedding": [ + * { + * "embedding": [ + * 23 + * ] + * }, + * { + * "embedding": [ + * -23 + * ] + * } + * ] + * } + */ +public record TextEmbeddingByteResults(List embeddings) implements InferenceServiceResults, TextEmbedding { + public static final String NAME = "text_embedding_service_byte_results"; + public static final String TEXT_EMBEDDING = TaskType.TEXT_EMBEDDING.toString(); + + public TextEmbeddingByteResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(Embedding::new)); + } + + @Override + public int getFirstEmbeddingSize() { + return TextEmbeddingUtils.getFirstEmbeddingSize(new ArrayList<>(embeddings)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(TEXT_EMBEDDING); + for (Embedding embedding : embeddings) { + embedding.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(embeddings); + } + + @Override + public 
String getWriteableName() { + return NAME; + } + + @Override + public List transformToCoordinationFormat() { + return embeddings.stream() + .map(embedding -> embedding.values.stream().mapToDouble(value -> value).toArray()) + .map(values -> new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults(TEXT_EMBEDDING, values, false)) + .toList(); + } + + @Override + @SuppressWarnings("deprecation") + public List transformToLegacyFormat() { + var legacyEmbedding = new LegacyTextEmbeddingResults( + embeddings.stream().map(embedding -> new LegacyTextEmbeddingResults.Embedding(embedding.toFloats())).toList() + ); + + return List.of(legacyEmbedding); + } + + public Map asMap() { + Map map = new LinkedHashMap<>(); + map.put(TEXT_EMBEDDING, embeddings.stream().map(Embedding::asMap).collect(Collectors.toList())); + + return map; + } + + public record Embedding(List values) implements Writeable, ToXContentObject, EmbeddingInt { + public static final String EMBEDDING = "embedding"; + + public Embedding(StreamInput in) throws IOException { + this(in.readCollectionAsImmutableList(StreamInput::readByte)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(values, StreamOutput::writeByte); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startArray(EMBEDDING); + for (Byte value : values) { + builder.value(value); + } + builder.endArray(); + + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + public Map asMap() { + return Map.of(EMBEDDING, values); + } + + public List toFloats() { + return values.stream().map(Byte::floatValue).toList(); + } + + @Override + public int getSize() { + return values().size(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java index ace5974866038..75eb4ebc19902 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -40,7 +41,7 @@ * ] * } */ -public record TextEmbeddingResults(List embeddings) implements InferenceServiceResults { +public record TextEmbeddingResults(List embeddings) implements InferenceServiceResults, TextEmbedding { public static final String NAME = "text_embedding_service_results"; public static final String TEXT_EMBEDDING = TaskType.TEXT_EMBEDDING.toString(); @@ -58,6 +59,11 @@ public TextEmbeddingResults(StreamInput in) throws IOException { ); } + @Override + public int getFirstEmbeddingSize() { + return TextEmbeddingUtils.getFirstEmbeddingSize(new ArrayList<>(embeddings)); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray(TEXT_EMBEDDING); @@ -103,13 +109,18 @@ public Map asMap() { return map; } - public record Embedding(List values) implements Writeable, ToXContentObject { + public record Embedding(List values) implements Writeable, ToXContentObject, EmbeddingInt { public static final String EMBEDDING = "embedding"; public Embedding(StreamInput in) throws IOException { this(in.readCollectionAsImmutableList(StreamInput::readFloat)); } + @Override + public int getSize() { + return values.size(); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(values, StreamOutput::writeFloat); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingUtils.java new file mode 100644 index 0000000000000..02cb3b878c7fe --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingUtils.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import java.util.List; + +public class TextEmbeddingUtils { + + /** + * Returns the first text embedding entry's array size. + * @param embeddings the list of embeddings + * @return the size of the text embedding + * @throws IllegalStateException if the list of embeddings is empty + */ + public static int getFirstEmbeddingSize(List embeddings) throws IllegalStateException { + if (embeddings.isEmpty()) { + throw new IllegalStateException("Embeddings list is empty"); + } + + return embeddings.get(0).getSize(); + } + + private TextEmbeddingUtils() {} + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/AuditMlNotificationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/AuditMlNotificationAction.java index 4fc509ca67b38..2090f1cb1bd54 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/AuditMlNotificationAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/AuditMlNotificationAction.java @@ -30,7 +30,7 @@ public class AuditMlNotificationAction extends ActionType public static final String NAME = "cluster:internal/xpack/ml/notification"; private AuditMlNotificationAction() { - super(NAME, 
AcknowledgedResponse::readFrom); + super(NAME); } public enum AuditType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CancelJobModelSnapshotUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CancelJobModelSnapshotUpgradeAction.java index 18d9168fc962d..0a4525e0524d7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CancelJobModelSnapshotUpgradeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CancelJobModelSnapshotUpgradeAction.java @@ -40,7 +40,7 @@ public class CancelJobModelSnapshotUpgradeAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java index fbaa2b8b65141..381fb30b65e68 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java @@ -36,7 +36,7 @@ public class CloseJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/close"; private CloseJobAction() { - super(NAME, CloseJobAction.Response::new); + super(NAME); } public static class Request extends BaseTasksRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java index 7af3d1a150ac8..00064138f0362 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java @@ -30,7 +30,7 @@ public class CoordinatedInferenceAction extends ActionType { diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java index a187848e86896..5c5e02559b1d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -24,7 +23,7 @@ public class DeleteCalendarAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/calendars/delete"; private DeleteCalendarAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -44,11 +43,6 @@ public String getCalendarId() { return calendarId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java index b09f08eccae6b..7d37dc8716387 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import 
org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -25,7 +24,7 @@ public class DeleteCalendarEventAction extends ActionType public static final String NAME = "cluster:admin/xpack/ml/calendars/events/delete"; private DeleteCalendarEventAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -51,11 +50,6 @@ public String getEventId() { return eventId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index 1388fb3e3db8f..61e76d2131057 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -28,7 +27,7 @@ public class DeleteDataFrameAnalyticsAction extends ActionType { @@ -69,11 +68,6 @@ public void setForce(boolean force) { this.force = force; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public String getDescription() { return DELETION_TASK_DESCRIPTION_PREFIX + id; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index 968aa00df4633..2681fadf8fc59 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -27,7 +26,7 @@ public class DeleteDatafeedAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/datafeeds/delete"; private DeleteDatafeedAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements ToXContentFragment { @@ -59,11 +58,6 @@ public void setForce(boolean force) { this.force = force; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java index e6f362bbea14c..feb4b16778966 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java @@ -29,7 +29,7 @@ public class DeleteExpiredDataAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/filters/delete"; private DeleteFilterAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class 
Request extends AcknowledgedRequest { @@ -46,11 +45,6 @@ public String getFilterId() { return filterId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java index e93df1b356c17..f3e888ef9599c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -24,7 +23,7 @@ public class DeleteForecastAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/forecast/delete"; private DeleteForecastAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -61,11 +60,6 @@ public void setAllowNoForecasts(boolean allowNoForecasts) { this.allowNoForecasts = allowNoForecasts; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index a101865c6c175..58b67e57acf26 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -26,7 +25,7 @@ public class DeleteJobAction extends ActionType { public static final String DELETION_TASK_DESCRIPTION_PREFIX = "delete-job-"; private DeleteJobAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -95,11 +94,6 @@ public boolean getDeleteUserAnnotations() { return deleteUserAnnotations; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public String getDescription() { return DELETION_TASK_DESCRIPTION_PREFIX + jobId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java index 277a82b66ca9b..2bdfcb311f939 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java @@ -24,7 +24,7 @@ public class DeleteModelSnapshotAction extends ActionType public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/delete"; private DeleteModelSnapshotAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends ActionRequest { diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java index fb71884c07c4b..9cd19eab449a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -28,7 +27,7 @@ public class DeleteTrainedModelAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/inference/delete"; private DeleteTrainedModelAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements ToXContentFragment { @@ -64,11 +63,6 @@ public void setForce(boolean force) { this.force = force; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(TrainedModelConfig.MODEL_ID.getPreferredName(), id); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java index 5295263962430..507060b1e51a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -25,7 +24,7 @@ public class DeleteTrainedModelAliasAction extends ActionType { @@ -61,11 +60,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java index 803563a6724aa..04f1b3ddb2e26 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java @@ -23,7 +23,7 @@ public class DeleteTrainedModelAssignmentAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EstimateModelMemoryAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EstimateModelMemoryAction.java index ec9200e6f2b46..a5df3ee8ed703 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EstimateModelMemoryAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EstimateModelMemoryAction.java @@ -34,7 +34,7 @@ public class EstimateModelMemoryAction extends ActionType implements ToXContentObject { diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java index 0874b6ae7e85d..b270c4506ba4a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java @@ -21,7 +21,7 @@ public class FinalizeJobExecutionAction extends ActionType public static final String NAME = "cluster:internal/xpack/ml/job/finalize_job_execution"; private FinalizeJobExecutionAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends MasterNodeRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java index 12fba46e40689..082f6d7aff899 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java @@ -31,7 +31,7 @@ public class FlushJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/flush"; private FlushJobAction() { - super(NAME, FlushJobAction.Response::new); + super(NAME); } public static class Request extends JobTaskRequest implements ToXContentObject { @@ -79,7 +79,7 @@ public Request(StreamInput in) throws IOException { advanceTime = in.readOptionalString(); skipTime = in.readOptionalString(); waitForNormalization = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { refreshRequired = in.readBoolean(); } } @@ -93,7 +93,7 @@ public void writeTo(StreamOutput out) throws IOException { 
out.writeOptionalString(advanceTime); out.writeOptionalString(skipTime); out.writeBoolean(waitForNormalization); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(refreshRequired); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java index ed94d145a0276..bb15e4f34d30e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java @@ -32,7 +32,7 @@ public class ForecastJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/forecast"; private ForecastJobAction() { - super(NAME, ForecastJobAction.Response::new); + super(NAME); } public static class Request extends JobTaskRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java index 6aa4d284872b3..a6f056a7b2c86 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java @@ -33,7 +33,7 @@ public class GetBucketsAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/ml/job/results/buckets/get"; private GetBucketsAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java index 65017cf11fcef..98a79b3e1b800 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java @@ -34,7 +34,7 @@ public class GetCalendarEventsAction extends ActionType public static final String NAME = "cluster:monitor/xpack/ml/calendars/get"; private GetCalendarsAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java index 1847f8f74ed75..bdeece989201f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java @@ -39,7 +39,7 @@ public class GetCategoriesAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedRunningStateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedRunningStateAction.java index 413bc58e6332d..a2cf82dd30e8c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedRunningStateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedRunningStateAction.java @@ -41,7 +41,7 @@ public class GetDatafeedRunningStateAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 
4ea73d1b7a6ee..1bd266c68a65a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -34,7 +34,7 @@ public class GetDatafeedsAction extends ActionType public static final String ALL = "_all"; private GetDatafeedsAction() { - super(NAME, Response::new); + super(NAME); } public static final class Request extends MasterNodeReadRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java index 54ffd89b68d44..42afa5f4512a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java @@ -54,7 +54,7 @@ public class GetDatafeedsStatsAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java index dd838d368ee04..2806b4b355a20 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java @@ -24,7 +24,7 @@ public class GetFiltersAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/filters/get"; private GetFiltersAction() { - super(NAME, Response::new); + super(NAME); } public static final class Request extends AbstractGetResourcesRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java index 
a11484c48a981..e3e4184c18dfc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java @@ -32,7 +32,7 @@ public class GetInfluencersAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java index 490c4dd99fcb0..3d23a91a27f2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java @@ -32,7 +32,7 @@ public class GetJobsAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/ml/job/get"; private GetJobsAction() { - super(NAME, Response::new); + super(NAME); } public static final class Request extends MasterNodeReadRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java index e8b0041875b07..5a13684fcd79f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java @@ -57,7 +57,7 @@ public class GetJobsStatsAction extends ActionType private static final String TIMING_STATS = "timing_stats"; private GetJobsStatsAction() { - super(NAME, GetJobsStatsAction.Response::new); + super(NAME); } public static class Request extends BaseTasksRequest { @@ -334,7 +334,7 @@ public boolean equals(Object obj) { } } - private QueryPage jobsStats; + private final QueryPage jobsStats; public Response(QueryPage jobsStats) { super(Collections.emptyList(), Collections.emptyList()); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java index 3bfc154275928..a7a99836cdaee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -33,7 +32,7 @@ public class GetMlAutoscalingStats extends ActionType { public static final String NAME = "cluster:monitor/xpack/ml/autoscaling/stats/get"; public GetMlAutoscalingStats() { - super(NAME, Response::new); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -46,11 +45,6 @@ public Request(StreamInput in) throws IOException { super(in); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { return new CancellableTask(id, type, action, "get_ml_autoscaling_resources", parentTaskId, headers); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java index edf0a7f7e7e65..7219bcb209fe9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java @@ -39,7 +39,7 @@ public class GetModelSnapshotsAction extends ActionType { public static final String 
NAME = "cluster:monitor/xpack/ml/job/results/records/get"; private GetRecordsAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java index 0c5fbbc065e29..4992177a10f8d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java @@ -33,7 +33,7 @@ public class GetTrainedModelsAction extends ActionType { public static final InferModelAction EXTERNAL_INSTANCE = new InferModelAction(EXTERNAL_NAME); private InferModelAction(String name) { - super(name, Response::new); + super(name); } public static class Request extends ActionRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java index 99d190a786564..bd03913290e0c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java @@ -56,7 +56,7 @@ public class InferTrainedModelDeploymentAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java index 7992093651138..4dfd76fa54793 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java @@ -21,7 +21,7 @@ public class KillProcessAction extends ActionType { public static final String NAME = "cluster:internal/xpack/ml/job/kill/process"; private KillProcessAction() { - super(NAME, KillProcessAction.Response::new); + super(NAME); } public static class Request extends JobTaskRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java index 3c69056625e89..1f1eb69ce606c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java @@ -26,7 +26,7 @@ public class MlInfoAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/ml/info/get"; private MlInfoAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java index c6d4428d0e369..e8b345b3c3ff6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -61,7 +60,7 @@ public class MlMemoryAction extends ActionType { static final String JAVA_INFERENCE_IN_BYTES = "java_inference_in_bytes"; private MlMemoryAction() { - super(NAME, 
Response::new); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -87,11 +86,6 @@ public String getNodeId() { return nodeId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(nodeId); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index c50342dd78157..b6f852605db9f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -39,7 +39,7 @@ public class OpenJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/open"; private OpenJobAction() { - super(NAME, NodeAcknowledgedResponse::new); + super(NAME); } public static class Request extends MasterNodeRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java index 916ebe0d613f2..34c78894f5cd0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java @@ -21,7 +21,7 @@ public class PersistJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/persist"; private PersistJobAction() { - super(NAME, PersistJobAction.Response::new); + super(NAME); } public static class Request extends JobTaskRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java 
index 4afad7a650db0..6ca201fd8034a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java @@ -35,7 +35,7 @@ public class PostCalendarEventsAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/data/post"; private PostDataAction() { - super(NAME, PostDataAction.Response::new); + super(NAME); } public static class Response extends BaseTasksResponse implements ToXContentObject, Writeable { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDataFrameAnalyticsAction.java index 86c794084388f..634487aa0d39f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDataFrameAnalyticsAction.java @@ -37,7 +37,7 @@ public class PreviewDataFrameAnalyticsAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/calendars/put"; private PutCalendarAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index 67cf1865efb78..c9da8aa4dd579 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -32,7 +32,7 @@ public class PutDataFrameAnalyticsAction extends ActionType implements 
ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 722b524cdab55..67b1b2f9087e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.IndicesOptions; @@ -27,7 +26,7 @@ public class PutDatafeedAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/datafeeds/put"; private PutDatafeedAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends AcknowledgedRequest implements ToXContentObject { @@ -56,11 +55,6 @@ public DatafeedConfig getDatafeed() { return datafeed; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index b2d3196824036..50216b72f20d6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -29,7 +29,7 @@ public class PutFilterAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/filters/put"; private PutFilterAction() { - super(NAME, Response::new); + super(NAME); } public static class Request 
extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index ad363e0984ddc..400bdaa3a27ea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.IndicesOptions; @@ -30,7 +29,7 @@ public class PutJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/put"; private PutJobAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -79,11 +78,6 @@ public Job.Builder getJobBuilder() { return jobBuilder; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java index 5f81290261232..769a3a3dded28 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java @@ -32,7 +32,7 @@ public class PutTrainedModelAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java index b5fc85bdc4f99..9f0b5880f5c51 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java @@ -35,7 +35,7 @@ public class PutTrainedModelAliasAction extends ActionType public static final String NAME = "cluster:admin/xpack/ml/inference/model_aliases/put"; private PutTrainedModelAliasAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java index 5341efeec1094..b7fcb98426cc0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java @@ -34,7 +34,7 @@ public class PutTrainedModelDefinitionPartAction extends ActionType { @@ -91,7 +91,7 @@ public Request(StreamInput in) throws IOException { this.part = in.readVInt(); this.totalDefinitionLength = in.readVLong(); this.totalParts = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.allowOverwriting = in.readBoolean(); } else { this.allowOverwriting = false; @@ -148,7 +148,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(part); out.writeVLong(totalDefinitionLength); out.writeVInt(totalParts); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(allowOverwriting); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java index c153cbc2c039b..1abae7be95011 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java @@ -34,7 +34,7 @@ public class PutTrainedModelVocabularyAction extends ActionType { @@ -86,12 +86,12 @@ public Request(StreamInput in) throws IOException { } else { this.merges = List.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.scores = in.readCollectionAsList(StreamInput::readDouble); } else { this.scores = List.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.allowOverwriting = in.readBoolean(); } else { this.allowOverwriting = false; @@ -136,10 +136,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { out.writeStringCollection(merges); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeCollection(scores, StreamOutput::writeDouble); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(allowOverwriting); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java index d9b900cc465d5..bc74f16eea0e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -34,7 +33,7 @@ public class ResetJobAction extends ActionType { public static final TransportVersion TRANSPORT_VERSION_INTRODUCED = TransportVersions.V_7_14_0; private ResetJobAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -115,11 +114,6 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, MlTasks.JOB_TASK_ID_PREFIX + jobId, parentTaskId, headers); } - @Override - public ActionRequestValidationException validate() { - return null; - } - public String getJobId() { return jobId; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java index b8bff389061b1..eb975133e71eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; 
import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -31,7 +30,7 @@ public class RevertModelSnapshotAction extends ActionType implements ToXContentObject { @@ -103,11 +102,6 @@ public void setForce(boolean force) { this.force = force; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetResetModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetResetModeAction.java index 3fc9b8a0f10b1..cd1d6f57b5195 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetResetModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetResetModeAction.java @@ -15,7 +15,7 @@ public class SetResetModeAction extends ActionType { public static final String NAME = "cluster:internal/xpack/ml/reset_mode"; private SetResetModeAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java index 666a9797fd1ad..9a1574bd2b036 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -26,7 +25,7 @@ public class SetUpgradeModeAction 
extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/upgrade_mode"; private SetUpgradeModeAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements ToXContentObject { @@ -56,11 +55,6 @@ public boolean isEnabled() { return enabled; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java index 85a7202817e83..67abda2b3eb64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java @@ -42,7 +42,7 @@ public class StartDataFrameAnalyticsAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 38a657bd472da..0ac1203f1144b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -50,7 +50,7 @@ public class StartDatafeedAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/datafeed/start"; private StartDatafeedAction() { - super(NAME, NodeAcknowledgedResponse::new); + super(NAME); } public static class Request extends MasterNodeRequest implements ToXContentObject { diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index c05c73bc31ddf..8d9da97538e11 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -69,7 +69,7 @@ public class StartTrainedModelDeploymentAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java index d1c82635a83c2..f352e24fee616 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java @@ -42,7 +42,7 @@ public class StopDataFrameAnalyticsAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index 209966fd43875..453e7a4528a87 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -39,7 +39,7 @@ public class StopDatafeedAction extends ActionType public static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueMinutes(5); private StopDatafeedAction() { - super(NAME, StopDatafeedAction.Response::new); + super(NAME); } public static class Request extends BaseTasksRequest implements ToXContentObject { 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java index 5fc3776e7013c..b8fb383f53671 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java @@ -34,7 +34,7 @@ public class StopTrainedModelDeploymentAction extends ActionType implements ToXContentObject { @@ -79,7 +79,7 @@ public Request(StreamInput in) throws IOException { allowNoMatch = in.readBoolean(); force = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_FINISH_PENDING_WORK_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { finishPendingWork = in.readBoolean(); } else { finishPendingWork = false; @@ -132,7 +132,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(allowNoMatch); out.writeBoolean(force); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_FINISH_PENDING_WORK_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { out.writeBoolean(finishPendingWork); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java index e58742799d4e6..81a0a95c9f8ba 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java @@ -30,7 +30,7 @@ public class TrainedModelCacheInfoAction extends ActionType { diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java index e5096051b1eae..e98eb38db6ea2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java @@ -22,7 +22,7 @@ public class UpdateCalendarJobAction extends ActionType implements ToXContentObject { @@ -71,11 +70,6 @@ public DataFrameAnalyticsConfigUpdate getUpdate() { return update; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { update.toXContent(builder, params); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java index 53e0927b31312..694ca39d9cd49 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -27,7 +26,7 @@ public class UpdateDatafeedAction extends ActionType public static final String NAME = "cluster:admin/xpack/ml/datafeeds/update"; private UpdateDatafeedAction() { - super(NAME, PutDatafeedAction.Response::new); + super(NAME); } public static class Request extends AcknowledgedRequest implements ToXContentObject { @@ -56,11 +55,6 @@ 
public DatafeedUpdate getUpdate() { return update; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java index e63aa95aeefd7..4e232bea4cc5b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java @@ -36,7 +36,7 @@ public class UpdateFilterAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/filters/update"; private UpdateFilterAction() { - super(NAME, PutFilterAction.Response::new); + super(NAME); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index 982ff2e36f9e8..4e80fcab05e2f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; @@ -26,7 +25,7 @@ public class UpdateJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/update"; private UpdateJobAction() { - super(NAME, PutJobAction.Response::new); + super(NAME); } public static class Request extends 
AcknowledgedRequest implements ToXContentObject { @@ -78,11 +77,6 @@ public boolean isInternal() { return isInternal; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java index c83b7d7578bf1..d80b055cc0fcc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java @@ -32,7 +32,7 @@ public class UpdateModelSnapshotAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index 33e26fb0ab3d1..b598d398025e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -34,7 +34,7 @@ public class UpdateTrainedModelDeploymentAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java index b09eed900838c..7fbcffa476159 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java @@ -30,7 +30,7 
@@ public class UpgradeJobModelSnapshotAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java index 2f7b561d69c04..6831f985fdc76 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java @@ -26,7 +26,7 @@ public class ValidateDetectorAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/validate/detector"; protected ValidateDetectorAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java index e90ac5c47210e..48549ae100e36 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java @@ -27,7 +27,7 @@ public class ValidateJobConfigAction extends ActionType { public static final String NAME = "cluster:admin/xpack/ml/job/validate"; protected ValidateJobConfigAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends ActionRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index f818199ac1ef9..695b3ffca120f 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -38,7 +38,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.common.time.TimeUtils; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlStrings; import org.elasticsearch.xpack.core.ml.utils.QueryProvider; @@ -144,7 +143,7 @@ public static void validateAggregations(AggregatorFactories.Builder aggregations } Builder.checkForOnlySingleTopLevelCompositeAggAndValidate(aggregations.getAggregatorFactories()); - AggregationBuilder histogramAggregation = ExtractorUtils.getHistogramAggregation(aggregatorFactories); + AggregationBuilder histogramAggregation = DatafeedConfigUtils.getHistogramAggregation(aggregatorFactories); if (histogramAggregation instanceof CompositeAggregationBuilder && aggregations.getPipelineAggregatorFactories().isEmpty() == false) { throw ExceptionsHelper.badRequestException( @@ -449,7 +448,7 @@ public Map getAggregations() { * @param namedXContentRegistry XContent registry to transform the lazily parsed aggregations */ public long getHistogramIntervalMillis(NamedXContentRegistry namedXContentRegistry) { - return ExtractorUtils.getHistogramIntervalMillis(getParsedAggregations(namedXContentRegistry)); + return DatafeedConfigUtils.getHistogramIntervalMillis(getParsedAggregations(namedXContentRegistry)); } /** @@ -461,7 +460,7 @@ public boolean hasCompositeAgg(NamedXContentRegistry namedXContentRegistry) { if (hasAggregations() == false) { return false; } - AggregationBuilder maybeComposite = ExtractorUtils.getHistogramAggregation( + AggregationBuilder maybeComposite = DatafeedConfigUtils.getHistogramAggregation( 
getParsedAggregations(namedXContentRegistry).getAggregatorFactories() ); return maybeComposite instanceof CompositeAggregationBuilder; @@ -622,12 +621,14 @@ private static ChunkingConfig defaultChunkingConfig(@Nullable AggProvider aggPro if (aggProvider == null || aggProvider.getParsedAggs() == null) { return ChunkingConfig.newAuto(); } else { - AggregationBuilder histogram = ExtractorUtils.getHistogramAggregation(aggProvider.getParsedAggs().getAggregatorFactories()); + AggregationBuilder histogram = DatafeedConfigUtils.getHistogramAggregation( + aggProvider.getParsedAggs().getAggregatorFactories() + ); if (histogram instanceof CompositeAggregationBuilder) { // Allow composite aggs to handle the underlying chunking and searching return ChunkingConfig.newOff(); } - long histogramIntervalMillis = ExtractorUtils.getHistogramIntervalMillis(histogram); + long histogramIntervalMillis = DatafeedConfigUtils.getHistogramIntervalMillis(histogram); if (histogramIntervalMillis <= 0) { throw ExceptionsHelper.badRequestException(DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO); } @@ -1084,7 +1085,7 @@ void validateScriptFields() { private static void checkNoMoreHistogramAggregations(Collection aggregations) { for (AggregationBuilder agg : aggregations) { - if (ExtractorUtils.isHistogram(agg)) { + if (DatafeedConfigUtils.isHistogram(agg)) { throw ExceptionsHelper.badRequestException(DATAFEED_AGGREGATIONS_MAX_ONE_DATE_HISTOGRAM); } checkNoMoreHistogramAggregations(agg.getSubAggregations()); @@ -1097,7 +1098,7 @@ static void checkHistogramAggregationHasChildMaxTimeAgg(AggregationBuilder histo timeField = ((ValuesSourceAggregationBuilder) histogramAggregation).field(); } if (histogramAggregation instanceof CompositeAggregationBuilder) { - DateHistogramValuesSourceBuilder valueSource = ExtractorUtils.getDateHistogramValuesSource( + DateHistogramValuesSourceBuilder valueSource = DatafeedConfigUtils.getDateHistogramValuesSource( (CompositeAggregationBuilder) 
histogramAggregation ); timeField = valueSource.field(); @@ -1115,7 +1116,7 @@ static void checkHistogramAggregationHasChildMaxTimeAgg(AggregationBuilder histo } private static void checkHistogramIntervalIsPositive(AggregationBuilder histogramAggregation) { - long interval = ExtractorUtils.getHistogramIntervalMillis(histogramAggregation); + long interval = DatafeedConfigUtils.getHistogramIntervalMillis(histogramAggregation); if (interval <= 0) { throw ExceptionsHelper.badRequestException(DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigUtils.java similarity index 91% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigUtils.java index 3489ba6e21158..b6c1bff955345 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigUtils.java @@ -4,14 +4,11 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.core.ml.datafeed.extractor; +package org.elasticsearch.xpack.core.ml.datafeed; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Rounding; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; @@ -29,21 +26,11 @@ import java.util.concurrent.TimeUnit; /** - * Collects common utility methods needed by various {@link DataExtractor} implementations + * Utility methods used for datafeed configuration. */ -public final class ExtractorUtils { +public final class DatafeedConfigUtils { - private static final String EPOCH_MILLIS = "epoch_millis"; - - private ExtractorUtils() {} - - /** - * Combines a user query with a time range query. - */ - public static QueryBuilder wrapInTimeRangeQuery(QueryBuilder userQuery, String timeField, long start, long end) { - QueryBuilder timeQuery = new RangeQueryBuilder(timeField).gte(start).lt(end).format(EPOCH_MILLIS); - return new BoolQueryBuilder().filter(userQuery).filter(timeQuery); - } + private DatafeedConfigUtils() {} /** * Find the (date) histogram in {@code aggFactory} and extract its interval. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java index 8d4b601a38aad..ad2d03a4b8f75 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java @@ -110,6 +110,11 @@ public Instant getLastStateChangeTime() { return lastStateChangeTime; } + @Override + public boolean isFailed() { + return DataFrameAnalyticsState.FAILED.equals(state); + } + public boolean isStatusStale(PersistentTasksCustomMetadata.PersistentTask task) { return allocationId != task.getAllocationId(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java index bc24ca129635e..a31e83d8246fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.xcontent.ToXContentObject; @@ -45,7 +45,7 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { * Processes given aggregations as a step towards computing result * @param aggs aggregations from {@link SearchResponse} */ - void 
process(Aggregations aggs); + void process(InternalAggregations aggs); /** * Gets the evaluation result for this metric. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java index 346996a742cf1..0a1778a6a6f30 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java @@ -14,7 +14,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -124,7 +124,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (overallAccuracy.get() == null && aggs.get(OVERALL_ACCURACY_AGG_NAME) instanceof NumericMetricsAggregation.SingleValue) { NumericMetricsAggregation.SingleValue overallAccuracyAgg = aggs.get(OVERALL_ACCURACY_AGG_NAME); overallAccuracy.set(overallAccuracyAgg.value()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java index f7e80e7fcf972..5bdd85e34a7c7 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.nested.Nested; @@ -175,7 +175,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (result.get() != null) { return; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java index 5279f026722af..e385e9d9d78d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java @@ -16,8 +16,8 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter; @@ -183,7 +183,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (topActualClassNames.get() == null && aggs.get(aggName(STEP_1_AGGREGATE_BY_ACTUAL_CLASS)) != null) { Terms termsAgg = aggs.get(aggName(STEP_1_AGGREGATE_BY_ACTUAL_CLASS)); topActualClassNames.set(termsAgg.getBuckets().stream().map(Terms.Bucket::getKeyAsString).sorted().collect(Collectors.toList())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java index 5b9cffd48f284..6936164ceb07e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java @@ -16,8 +16,8 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.bucket.filter.Filters; @@ -140,7 +140,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { final Aggregation 
classNamesAgg = aggs.get(ACTUAL_CLASSES_NAMES_AGG_NAME); if (topActualClassNames.get() == null && classNamesAgg instanceof Terms topActualClassesAgg) { if (topActualClassesAgg.getSumOfOtherDocCounts() > 0) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java index 646af7848cf23..6aaabc13c86c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java @@ -15,8 +15,8 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -119,7 +119,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { final Aggregation byClass = aggs.get(BY_ACTUAL_CLASS_AGG_NAME); final Aggregation avgRecall = aggs.get(AVG_RECALL_AGG_NAME); if (result.get() == null diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java index 99d7853ddab3a..83b6fe58498e5 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -92,7 +92,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { result = evaluate(aggs); } @@ -103,7 +103,7 @@ public Optional getResult() { protected abstract List aggsAt(String actualField, String predictedProbabilityField); - protected abstract EvaluationMetricResult evaluate(Aggregations aggs); + protected abstract EvaluationMetricResult evaluate(InternalAggregations aggs); enum Condition { TP(true, true), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java index e15148b5fd7e1..c06edb66b301a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java @@ -14,7 +14,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import 
org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -155,7 +155,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (result.get() != null) { return; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java index bf13b882f3e98..f902274fdc7f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -87,7 +87,7 @@ protected List aggsAt(String actualField, String predictedPr } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public EvaluationMetricResult evaluate(InternalAggregations aggs) { long[] tp = new long[thresholds.length]; long[] fp = new long[thresholds.length]; long[] tn = new long[thresholds.length]; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java index fcbf1c6216239..d2364faaf7859 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -83,7 +83,7 @@ protected List aggsAt(String actualField, String predictedPr } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public EvaluationMetricResult evaluate(InternalAggregations aggs) { double[] precisions = new double[thresholds.length]; for (int i = 0; i < thresholds.length; i++) { double threshold = thresholds[i]; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java index 07f0cdbb6c17a..8291bcdac30c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; -import 
org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -83,7 +83,7 @@ protected List aggsAt(String actualField, String predictedPr } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public EvaluationMetricResult evaluate(InternalAggregations aggs) { double[] recalls = new double[thresholds.length]; for (int i = 0; i < thresholds.length; i++) { double threshold = thresholds[i]; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java index 28802148220b6..4e8ba57ffbc95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java @@ -14,7 +14,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -118,7 +118,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME); result = value == null ? 
new Result(0.0) : new Result(value.value()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java index 2a50383494abe..d43ff3e5390b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java @@ -13,7 +13,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ObjectParser; @@ -97,7 +97,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME); result = value == null ? 
new Result(0.0) : new Result(value.value()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java index 9ca3e39d53c4b..00afd2acff200 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java @@ -14,7 +14,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -113,7 +113,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME); result = value == null ? 
new Result(0.0) : new Result(value.value()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java index fa41661771f62..2e1251abecda1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java @@ -13,7 +13,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; @@ -100,7 +100,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue residualSumOfSquares = aggs.get(SS_RES); ExtendedStats extendedStats = aggs.get(ExtendedStatsAggregationBuilder.NAME + "_actual"); // extendedStats.getVariance() is the statistical sumOfSquares divided by count diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index c80027a7234f3..b9fddd3ea30a2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -107,7 +107,7 @@ public 
class TrainedModelConfig implements ToXContentObject, Writeable { public static final ParseField PLATFORM_ARCHITECTURE = new ParseField("platform_architecture"); public static final TransportVersion VERSION_3RD_PARTY_CONFIG_ADDED = TransportVersions.V_8_0_0; - public static final TransportVersion VERSION_ALLOCATION_MEMORY_ADDED = TransportVersions.V_8_500_064; + public static final TransportVersion VERSION_ALLOCATION_MEMORY_ADDED = TransportVersions.V_8_11_X; // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly public static final ObjectParser LENIENT_PARSER = createParser(true); @@ -293,7 +293,7 @@ public TrainedModelConfig(StreamInput in) throws IOException { modelPackageConfig = null; fullDefinition = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { platformArchitecture = in.readOptionalString(); } else { platformArchitecture = null; @@ -482,7 +482,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalBoolean(fullDefinition); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { out.writeOptionalString(platformArchitecture); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index 8147dabda7b48..b7219fbaa2061 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -98,7 +98,7 @@ public final class TrainedModelAssignment 
implements SimpleDiffable, U extends InferenceConfigUpdate> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java index e18a1d056f57c..9e599eb86b8ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java @@ -59,6 +59,11 @@ public MPNetTokenization(StreamInput in) throws IOException { super(in); } + @Override + Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) { + return new MPNetTokenization(this.doLowerCase, this.withSpecialTokens, updatedMaxSeqLength, Truncate.NONE, updatedSpan); + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java index 14bb5499ac4ab..5087ac025915c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java @@ -178,7 +178,7 @@ public ModelPackageConfig(StreamInput in) throws IOException { this.modelType = in.readOptionalString(); this.tags = in.readOptionalCollectionAsList(StreamInput::readString); this.vocabularyFile = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_PACKAGE_LOADER_PLATFORM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { this.platformArchitecture = in.readOptionalString(); } else { platformArchitecture = 
null; @@ -308,7 +308,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(modelType); out.writeOptionalStringCollection(tags); out.writeOptionalString(vocabularyFile); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_PACKAGE_LOADER_PLATFORM_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_11_X)) { out.writeOptionalString(platformArchitecture); } if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java index e7f3a66b6748f..b87e7e7edbb71 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java @@ -157,6 +157,23 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof NerConfigUpdate configUpdate) { + return new NerConfig( + vocabularyConfig, + (configUpdate.getTokenizationUpdate() == null) ? 
tokenization : configUpdate.getTokenizationUpdate().apply(tokenization), + classificationLabels, + Optional.ofNullable(update.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new NerConfig(this.vocabularyConfig, updatedTokenization, this.classificationLabels, this.resultsField); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java index 884ecb39df448..015aa658b1658 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java @@ -20,7 +20,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; @@ -92,28 +91,6 @@ public String getName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof NerConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a request of type [{}]", - originalConfig.getName(), - getName() - ); - } - NerConfig nerConfig = (NerConfig) originalConfig; - if (isNoop(nerConfig)) { - return nerConfig; - } - - return new NerConfig( - nerConfig.getVocabularyConfig(), - 
(tokenizationUpdate == null) ? nerConfig.getTokenization() : tokenizationUpdate.apply(nerConfig.getTokenization()), - nerConfig.getClassificationLabels(), - Optional.ofNullable(resultsField).orElse(nerConfig.getResultsField()) - ); - } - boolean isNoop(NerConfig originalConfig) { return (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) && super.isNoop(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java index 67d0edb880a66..dae96dc9a684c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java @@ -29,6 +29,11 @@ public boolean isTargetTypeSupported(TargetType targetType) { return true; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + throw new UnsupportedOperationException("Cannot update NullInferenceConfig objects"); + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.CURRENT; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java index 74ca76779d4b2..0e27fc00b9b70 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java @@ -120,6 +120,22 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof 
PassThroughConfigUpdate configUpdate) { + return new PassThroughConfig( + vocabularyConfig, + (configUpdate.getTokenizationUpdate() == null) ? tokenization : configUpdate.getTokenizationUpdate().apply(tokenization), + update.getResultsField() == null ? resultsField : update.getResultsField() + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new PassThroughConfig(this.vocabularyConfig, updatedTokenization, this.resultsField); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java index 874f82dc019ca..1a7832a70cfdf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java @@ -96,30 +96,6 @@ public String getName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) { - return originalConfig; - } - - if (originalConfig instanceof PassThroughConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - PassThroughConfig passThroughConfig = (PassThroughConfig) originalConfig; - return new PassThroughConfig( - passThroughConfig.getVocabularyConfig(), - (tokenizationUpdate == null) 
- ? passThroughConfig.getTokenization() - : tokenizationUpdate.apply(passThroughConfig.getTokenization()), - resultsField == null ? originalConfig.getResultsField() : resultsField - ); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof PassThroughConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java index 7572d757f2b5f..014cdb1dd891f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java @@ -188,6 +188,32 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof QuestionAnsweringConfigUpdate configUpdate) { + return new QuestionAnsweringConfig( + configUpdate.getQuestion(), + Optional.ofNullable(configUpdate.getNumTopClasses()).orElse(numTopClasses), + Optional.ofNullable(configUpdate.getMaxAnswerLength()).orElse(maxAnswerLength), + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? 
tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new QuestionAnsweringConfig( + question, + numTopClasses, + maxAnswerLength, + vocabularyConfig, + updatedTokenization, + resultsField + ); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_3_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java index 40657544a14d5..df4cb565731ed 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java @@ -22,7 +22,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.NUM_TOP_CLASSES; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; @@ -126,37 +125,6 @@ public String getWriteableName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof QuestionAnsweringConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - QuestionAnsweringConfig questionAnsweringConfig = 
(QuestionAnsweringConfig) originalConfig; - return new QuestionAnsweringConfig( - question, - Optional.ofNullable(numTopClasses).orElse(questionAnsweringConfig.getNumTopClasses()), - Optional.ofNullable(maxAnswerLength).orElse(questionAnsweringConfig.getMaxAnswerLength()), - questionAnsweringConfig.getVocabularyConfig(), - tokenizationUpdate == null - ? questionAnsweringConfig.getTokenization() - : tokenizationUpdate.apply(questionAnsweringConfig.getTokenization()), - Optional.ofNullable(resultsField).orElse(questionAnsweringConfig.getResultsField()) - ); - } - - boolean isNoop(QuestionAnsweringConfig originalConfig) { - return (numTopClasses == null || numTopClasses.equals(originalConfig.getNumTopClasses())) - && (maxAnswerLength == null || maxAnswerLength.equals(originalConfig.getMaxAnswerLength())) - && (resultsField == null || resultsField.equals(originalConfig.getResultsField())) - && (question == null || question.equals(originalConfig.getQuestion())) - && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof QuestionAnsweringConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java index 8ea53b2725523..337a1ac693128 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java @@ -135,6 +135,24 @@ public boolean isTargetTypeSupported(TargetType targetType) { return TargetType.REGRESSION.equals(targetType); } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof RegressionConfigUpdate configUpdate) { + RegressionConfig.Builder builder = new RegressionConfig.Builder(this); + if 
(configUpdate.getResultsField() != null) { + builder.setResultsField(configUpdate.getResultsField()); + } + if (configUpdate.getNumTopFeatureImportanceValues() != null) { + builder.setNumTopFeatureImportanceValues(configUpdate.getNumTopFeatureImportanceValues()); + } + return builder.build(); + } else if (update instanceof ResultsFieldUpdate resultsFieldUpdate) { + return new RegressionConfig.Builder(this).setResultsField(resultsFieldUpdate.getResultsField()).build(); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return requestingImportance() ? MlConfigVersion.V_7_7_0 : MIN_SUPPORTED_VERSION; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java index a678806181ef8..dc1a7bdeef104 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java @@ -144,41 +144,11 @@ public int hashCode() { return Objects.hash(resultsField, numTopFeatureImportanceValues); } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof RegressionConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - RegressionConfig regressionConfig = (RegressionConfig) originalConfig; - if (isNoop(regressionConfig)) { - return originalConfig; - } - RegressionConfig.Builder builder = new RegressionConfig.Builder(regressionConfig); - if (resultsField != null) { - builder.setResultsField(resultsField); - } - if 
(numTopFeatureImportanceValues != null) { - builder.setNumTopFeatureImportanceValues(numTopFeatureImportanceValues); - } - return builder.build(); - } - @Override public boolean isSupported(InferenceConfig inferenceConfig) { return inferenceConfig instanceof RegressionConfig; } - boolean isNoop(RegressionConfig originalConfig) { - return (resultsField == null || originalConfig.getResultsField().equals(resultsField)) - && (numTopFeatureImportanceValues == null - || originalConfig.getNumTopFeatureImportanceValues() == numTopFeatureImportanceValues); - } - public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private Integer numTopFeatureImportanceValues; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java index fe1fb9844610d..34d3b1c1e38f5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java @@ -11,7 +11,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; @@ -34,22 +33,6 @@ public ResultsFieldUpdate(StreamInput in) throws IOException { resultsField = in.readString(); } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof ClassificationConfig) { - ClassificationConfigUpdate update = new ClassificationConfigUpdate(null, resultsField, null, null, null); - return update.apply(originalConfig); - } else if (originalConfig instanceof RegressionConfig) { - RegressionConfigUpdate 
update = new RegressionConfigUpdate(resultsField, null); - return update.apply(originalConfig); - } else { - throw ExceptionsHelper.badRequestException( - "Inference config of unknown type [{}] can not be updated", - originalConfig.getName() - ); - } - } - @Override public boolean isSupported(InferenceConfig config) { return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java index febb7cb40ec82..bbb35ad70b90d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java @@ -85,6 +85,18 @@ public RobertaTokenization(StreamInput in) throws IOException { this.addPrefixSpace = in.readBoolean(); } + @Override + Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) { + return new RobertaTokenization( + this.doLowerCase, + this.withSpecialTokens, + updatedMaxSeqLength, + Truncate.NONE, + updatedSpan, + this.addPrefixSpace + ); + } + public boolean isAddPrefixSpace() { return addPrefixSpace; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java index ab50f26636fc4..153879d4f61b4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java @@ -133,6 +133,40 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig 
apply(InferenceConfigUpdate update) { + if (update instanceof TextClassificationConfigUpdate configUpdate) { + TextClassificationConfig.Builder builder = new TextClassificationConfig.Builder(this); + if (configUpdate.getNumTopClasses() != null) { + builder.setNumTopClasses(configUpdate.getNumTopClasses()); + } + if (configUpdate.getClassificationLabels() != null) { + if (classificationLabels.size() != configUpdate.getClassificationLabels().size()) { + throw ExceptionsHelper.badRequestException( + "The number of [{}] the model is defined with [{}] does not match the number in the update [{}]", + CLASSIFICATION_LABELS, + classificationLabels.size(), + configUpdate.getClassificationLabels().size() + ); + } + builder.setClassificationLabels(configUpdate.getClassificationLabels()); + } + if (configUpdate.getResultsField() != null) { + builder.setResultsField(configUpdate.getResultsField()); + } + if (configUpdate.tokenizationUpdate != null) { + builder.setTokenization(configUpdate.tokenizationUpdate.apply(tokenization)); + } + + return builder.build(); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextClassificationConfig.Builder(this).setTokenization(updatedTokenization).build(); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java index 460a3a685d534..5379e3eeb17f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java @@ -111,54 +111,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(resultsField); } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof TextClassificationConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - TextClassificationConfig classificationConfig = (TextClassificationConfig) originalConfig; - if (isNoop(classificationConfig)) { - return originalConfig; - } - - TextClassificationConfig.Builder builder = new TextClassificationConfig.Builder(classificationConfig); - if (numTopClasses != null) { - builder.setNumTopClasses(numTopClasses); - } - if (classificationLabels != null) { - if (classificationLabels.size() != classificationConfig.getClassificationLabels().size()) { - throw ExceptionsHelper.badRequestException( - "The number of [{}] the model is defined with [{}] does not match the number in the update [{}]", - CLASSIFICATION_LABELS, - classificationConfig.getClassificationLabels().size(), - classificationLabels.size() - ); - } - builder.setClassificationLabels(classificationLabels); - } - if (resultsField != null) { - builder.setResultsField(resultsField); - } - - if (tokenizationUpdate != null) { - builder.setTokenization(tokenizationUpdate.apply(classificationConfig.getTokenization())); - } - - return builder.build(); - } - - boolean isNoop(TextClassificationConfig originalConfig) { - return (this.numTopClasses == null || this.numTopClasses == originalConfig.getNumTopClasses()) - && (this.classificationLabels == null) - && (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) - && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig 
config) { return config instanceof TextClassificationConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java index 518b9eb62d793..d043c17535636 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java @@ -145,6 +145,23 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof TextEmbeddingConfigUpdate configUpdate) { + return new TextEmbeddingConfig( + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + configUpdate.getResultsField() == null ? 
resultsField : configUpdate.getResultsField(), + embeddingSize + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextEmbeddingConfig(vocabularyConfig, updatedTokenization, resultsField, embeddingSize); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java index 6acd2d209a875..e89281a59f7d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java @@ -104,29 +104,6 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_0_0; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) { - return originalConfig; - } - - if (originalConfig instanceof TextEmbeddingConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - TextEmbeddingConfig embeddingConfig = (TextEmbeddingConfig) originalConfig; - return new TextEmbeddingConfig( - embeddingConfig.getVocabularyConfig(), - tokenizationUpdate == null ? embeddingConfig.getTokenization() : tokenizationUpdate.apply(embeddingConfig.getTokenization()), - resultsField == null ? 
embeddingConfig.getResultsField() : resultsField, - embeddingConfig.getEmbeddingSize() - ); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof TextEmbeddingConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java index d8315bec14153..c4d78c9faf219 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java @@ -121,6 +121,22 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof TextExpansionConfigUpdate configUpdate) { + return new TextExpansionConfig( + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? 
tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextExpansionConfig(vocabularyConfig, updatedTokenization, resultsField); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public boolean isAllocateOnly() { return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java index 181cadbaf7168..3ba5c91502480 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java @@ -21,7 +21,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; @@ -100,33 +99,6 @@ public String getName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof TextExpansionConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a request of type [{}]", - originalConfig.getName(), - getName() - ); - } - TextExpansionConfig textExpansionConfig = (TextExpansionConfig) originalConfig; - if (isNoop(textExpansionConfig)) { - return textExpansionConfig; - } - - return new TextExpansionConfig( - 
textExpansionConfig.getVocabularyConfig(), - (tokenizationUpdate == null) - ? textExpansionConfig.getTokenization() - : tokenizationUpdate.apply(textExpansionConfig.getTokenization()), - Optional.ofNullable(resultsField).orElse(textExpansionConfig.getResultsField()) - ); - } - - boolean isNoop(TextExpansionConfig originalConfig) { - return (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof TextExpansionConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java index 5511df03e6f36..bbd819891e217 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java @@ -149,6 +149,24 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof TextSimilarityConfigUpdate configUpdate) { + return new TextSimilarityConfig( + configUpdate.getText(), + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? 
tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField), + Optional.ofNullable(configUpdate.getSpanScoreFunction()).orElse(spanScoreFunction) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextSimilarityConfig(text, vocabularyConfig, updatedTokenization, resultsField, spanScoreFunction); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_5_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java index c7afacc07b944..2ddbf8bd63f49 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java @@ -110,31 +110,13 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th return builder; } - @Override - public String getWriteableName() { - return NAME; + public TextSimilarityConfig.SpanScoreFunction getSpanScoreFunction() { + return spanScoreFunction; } @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof TextSimilarityConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - TextSimilarityConfig textSimilarityConfig = (TextSimilarityConfig) originalConfig; - return new TextSimilarityConfig( - 
text, - textSimilarityConfig.getVocabularyConfig(), - tokenizationUpdate == null - ? textSimilarityConfig.getTokenization() - : tokenizationUpdate.apply(textSimilarityConfig.getTokenization()), - Optional.ofNullable(resultsField).orElse(textSimilarityConfig.getResultsField()), - Optional.ofNullable(spanScoreFunction).orElse(textSimilarityConfig.getSpanScoreFunction()) - ); + public String getWriteableName() { + return NAME; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java index ef437e0201510..4f301b48cdacc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java @@ -7,11 +7,14 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -49,6 +52,19 @@ public String toString() { } } + record SpanSettings(@Nullable Integer maxSequenceLength, int span) implements Writeable { + + SpanSettings(StreamInput in) throws IOException { + this(in.readOptionalVInt(), in.readVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalVInt(maxSequenceLength); + out.writeVInt(span); + } + }; + // TODO add global 
params like never_split, bos_token, eos_token, mask_token, tokenize_chinese_chars, strip_accents, etc. public static final ParseField DO_LOWER_CASE = new ParseField("do_lower_case"); public static final ParseField WITH_SPECIAL_TOKENS = new ParseField("with_special_tokens"); @@ -104,20 +120,8 @@ public static BertTokenization createDefault() { + "] to indicate no windowing should occur" ); } - if (this.span > this.maxSequenceLength) { - throw new IllegalArgumentException( - "[" - + SPAN.getPreferredName() - + "] provided [" - + this.span - + "] must not be greater than [" - + MAX_SEQUENCE_LENGTH.getPreferredName() - + "] provided [" - + this.maxSequenceLength - + "]" - ); - } - validateSpanAndTruncate(truncate, span); + validateSpanAndMaxSequenceLength(this.maxSequenceLength, this.span); + validateSpanAndTruncate(this.truncate, this.span); } public Tokenization(StreamInput in) throws IOException { @@ -132,6 +136,35 @@ public Tokenization(StreamInput in) throws IOException { } } + /** + * Return a copy of this with the tokenizer span settings updated + * @param update The settings to update + * @return An updated Tokenization + */ + public Tokenization updateSpanSettings(SpanSettings update) { + int maxLength = update.maxSequenceLength() == null ? 
this.maxSequenceLength : update.maxSequenceLength(); + validateSpanAndMaxSequenceLength(maxLength, span); + if (update.maxSequenceLength() != null && update.maxSequenceLength() > this.maxSequenceLength) { + throw new ElasticsearchStatusException( + "Updated max sequence length [{}] cannot be greater " + "than the model's max sequence length [{}]", + RestStatus.BAD_REQUEST, + update.maxSequenceLength(), + this.maxSequenceLength + ); + } + + return buildWindowingTokenization(maxLength, update.span()); + } + + /** + * Build a copy of this with {@code Truncate == NONE} using + * the specified max sequence length and span + * @param updatedMaxSeqLength Max sequence length + * @param updatedSpan Span + * @return A new Tokenization object + */ + abstract Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan); + @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(doLowerCase); @@ -160,6 +193,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static void validateSpanAndMaxSequenceLength(int maxSequenceLength, int span) { + if (span > maxSequenceLength) { + throw new IllegalArgumentException( + "[" + + SPAN.getPreferredName() + + "] provided [" + + span + + "] must not be greater than [" + + MAX_SEQUENCE_LENGTH.getPreferredName() + + "] provided [" + + maxSequenceLength + + "]" + ); + } + } + public static void validateSpanAndTruncate(@Nullable Truncate truncate, @Nullable Integer span) { if ((span != null && span != UNSET_SPAN_VALUE) && (truncate != null && truncate.isInCompatibleWithSpan())) { throw new IllegalArgumentException( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java new file mode 100644 index 0000000000000..2414fe5776438 --- /dev/null +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.inference.trainedmodel; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +/** + * An update that sets the tokenization truncate option to NONE + * and updates the span and max sequence length settings. + */ +public class TokenizationConfigUpdate implements InferenceConfigUpdate { + + public static final String NAME = "tokenization_update"; + + private final Tokenization.SpanSettings spanSettings; + + public TokenizationConfigUpdate(Tokenization.SpanSettings spanSettings) { + this.spanSettings = spanSettings; + } + + public TokenizationConfigUpdate(StreamInput in) throws IOException { + this.spanSettings = new Tokenization.SpanSettings(in); + } + + public Tokenization.SpanSettings getSpanSettings() { + return spanSettings; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + spanSettings.writeTo(out); + } + + @Override + public boolean isSupported(InferenceConfig config) { + return true; + } + + @Override + public String getResultsField() { + return null; + } + + @Override + public Builder, ? 
extends InferenceConfigUpdate> newBuilder() { + throw new UnsupportedOperationException("Tokenization update is not supported as a builder"); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenizationConfigUpdate that = (TokenizationConfigUpdate) o; + return Objects.equals(spanSettings, that.spanSettings); + } + + @Override + public int hashCode() { + return Objects.hash(spanSettings); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java index 43016e58420ad..648e52538040d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java @@ -72,6 +72,11 @@ public XLMRobertaTokenization(StreamInput in) throws IOException { super(in); } + @Override + protected Tokenization buildWindowingTokenization(int maxSeqLength, int span) { + return new XLMRobertaTokenization(withSpecialTokens, maxSeqLength, Truncate.NONE, span); + } + @Override public String getWriteableName() { return NAME; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java index ba4c130b987d2..4c669f289016a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java @@ -198,6 +198,43 @@ public boolean 
isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof ZeroShotClassificationConfigUpdate configUpdate) { + if ((configUpdate.getLabels() == null || configUpdate.getLabels().isEmpty()) + && (this.labels == null || this.labels.isEmpty())) { + throw ExceptionsHelper.badRequestException( + "stored configuration has no [{}] defined, supplied inference_config update must supply [{}]", + LABELS.getPreferredName(), + LABELS.getPreferredName() + ); + } + + return new ZeroShotClassificationConfig( + classificationLabels, + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + hypothesisTemplate, + Optional.ofNullable(configUpdate.getMultiLabel()).orElse(isMultiLabel), + Optional.ofNullable(configUpdate.getLabels()).orElse(labels), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new ZeroShotClassificationConfig( + classificationLabels, + vocabularyConfig, + updatedTokenization, + hypothesisTemplate, + isMultiLabel, + labels, + resultsField + ); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java index 47fd75ed6ff42..8f03d5e3d01cf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java @@ -23,7 +23,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; @@ -121,45 +120,6 @@ public String getWriteableName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof ZeroShotClassificationConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - ZeroShotClassificationConfig zeroShotConfig = (ZeroShotClassificationConfig) originalConfig; - if ((labels == null || labels.isEmpty()) && (zeroShotConfig.getLabels() == null || zeroShotConfig.getLabels().isEmpty())) { - throw ExceptionsHelper.badRequestException( - "stored configuration has no [{}] defined, supplied inference_config update must supply [{}]", - LABELS.getPreferredName(), - LABELS.getPreferredName() - ); - } - if (isNoop(zeroShotConfig)) { - return originalConfig; - } - return new ZeroShotClassificationConfig( - zeroShotConfig.getClassificationLabels(), - zeroShotConfig.getVocabularyConfig(), - tokenizationUpdate == null ? 
zeroShotConfig.getTokenization() : tokenizationUpdate.apply(zeroShotConfig.getTokenization()), - zeroShotConfig.getHypothesisTemplate(), - Optional.ofNullable(isMultiLabel).orElse(zeroShotConfig.isMultiLabel()), - Optional.ofNullable(labels).orElse(zeroShotConfig.getLabels().orElse(null)), - Optional.ofNullable(resultsField).orElse(zeroShotConfig.getResultsField()) - ); - } - - boolean isNoop(ZeroShotClassificationConfig originalConfig) { - return (labels == null || labels.equals(originalConfig.getLabels().orElse(null))) - && (isMultiLabel == null || isMultiLabel.equals(originalConfig.isMultiLabel())) - && (resultsField == null || resultsField.equals(originalConfig.getResultsField())) - && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof ZeroShotClassificationConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java index c07cb0cf9c91a..41fd38ca1398c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java @@ -106,6 +106,11 @@ public Instant getLastStateChangeTime() { return lastStateChangeTime; } + @Override + public boolean isFailed() { + return JobState.FAILED.equals(state); + } + /** * The job state stores the allocation ID at the time it was last set. 
* This method compares the allocation ID in the state with the allocation diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java index 883c94093a2c5..2254959242eab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java @@ -66,7 +66,7 @@ public FlushAcknowledgement(String id, Instant lastFinalizedBucketEnd, Boolean r public FlushAcknowledgement(StreamInput in) throws IOException { id = in.readString(); lastFinalizedBucketEnd = in.readOptionalInstant(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { refreshRequired = in.readBoolean(); } else { refreshRequired = true; @@ -77,7 +77,7 @@ public FlushAcknowledgement(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { out.writeString(id); out.writeOptionalInstant(lastFinalizedBucketEnd); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(refreshRequired); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java index addc29a4990b4..8fcc977e3faeb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java @@ -29,7 +29,7 @@ public class GetTrainedModelPackageConfigAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/LoadTrainedModelPackageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/LoadTrainedModelPackageAction.java index 2e0d89eec7ae0..fd935c052333d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/LoadTrainedModelPackageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/LoadTrainedModelPackageAction.java @@ -29,7 +29,7 @@ public class LoadTrainedModelPackageAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java index 33eac554d0129..57b08ad3f3e31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java @@ -51,11 +51,11 @@ public static ResourceNotFoundException missingModelDeployment(String deployment } public static ResourceNotFoundException missingTrainedModel(String modelId) { - return new ResourceNotFoundException("No known trained model with model_id [{}]", modelId); + return new ResourceNotFoundException("No known trained model with model_id [{}], you may need to create it", modelId); } public static ResourceNotFoundException missingTrainedModel(String modelId, Exception cause) { - return new ResourceNotFoundException("No known trained model with model_id [{}]", cause, modelId); + return new ResourceNotFoundException("No known trained model with model_id [{}], you may need to create it", 
cause, modelId); } public static ElasticsearchException serverError(String msg) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java index 09a7d3827caf2..7600dac441162 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java @@ -18,4 +18,9 @@ public interface MlTaskState { */ @Nullable Instant getLastStateChangeTime(); + + /** + * @return Is the task in the failed state? + */ + boolean isFailed(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java index 919403f3e09a0..eece4d6d5a925 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java @@ -14,6 +14,6 @@ public class MonitoringBulkAction extends ActionType { public static final String NAME = "cluster:admin/xpack/monitoring/bulk"; private MonitoringBulkAction() { - super(NAME, MonitoringBulkResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java index 2b252607b05ad..638e57207fbeb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java @@ -91,6 +91,7 @@ public MonitoringBulkRequest add( null, null, null, + null, true, 
xContentType, (indexRequest, type) -> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringMigrateAlertsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringMigrateAlertsAction.java index 895cca529135f..5c742f9a505a6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringMigrateAlertsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringMigrateAlertsAction.java @@ -14,6 +14,6 @@ public class MonitoringMigrateAlertsAction extends ActionType implements ToXContentFragment { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java index 200c984317d79..6c76ca813170d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java @@ -34,7 +34,7 @@ public class GetRollupCapsAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java index 76b3b7c077924..06a6b4c2a072c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.rollup.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; import 
org.elasticsearch.action.fieldcaps.FieldCapabilities; @@ -30,7 +29,7 @@ public class PutRollupJobAction extends ActionType { public static final String NAME = "cluster:admin/xpack/rollup/put"; private PutRollupJobAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { @@ -69,11 +68,6 @@ public void writeTo(StreamOutput out) throws IOException { this.config.writeTo(out); } - @Override - public ActionRequestValidationException validate() { - return null; - } - public RollupActionRequestValidationException validateMappings(Map> fieldCapsResponse) { RollupActionRequestValidationException validationException = new RollupActionRequestValidationException(); if (fieldCapsResponse.size() == 0) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java index c0bb62aa610fe..ee720a7819afc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java @@ -18,7 +18,7 @@ public class RollupSearchAction extends ActionType { public static final String NAME = "indices:data/read/xpack/rollup/search"; private RollupSearchAction() { - super(NAME, SearchResponse::new); + super(NAME); } public static class RequestBuilder extends ActionRequestBuilder { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java index 00a4daaa061c9..b05a2170a1be4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java @@ -28,7 +28,7 @@ public class StartRollupJobAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java index f1527b4c2cafb..bf09f0a0111a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java @@ -35,7 +35,7 @@ public class StopRollupJobAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java index 7596fe75b4173..10b7730b58c9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java @@ -134,12 +134,12 @@ public AsyncStatusResponse(StreamInput in) throws IOException { this.skippedShards = in.readVInt(); this.failedShards = in.readVInt(); this.completionStatus = (this.isRunning == false) ? 
RestStatus.readFrom(in) : null; - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.clusters = in.readOptionalWriteable(SearchResponse.Clusters::new); } else { this.clusters = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.completionTimeMillis = in.readOptionalVLong(); } else { this.completionTimeMillis = null; @@ -160,11 +160,11 @@ public void writeTo(StreamOutput out) throws IOException { if (isRunning == false) { RestStatus.writeTo(out, completionStatus); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { // optional since only CCS uses is; it is null for local-only searches out.writeOptionalWriteable(clusters); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeOptionalVLong(completionTimeMillis); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncSearchAction.java index 7c11d93d915b2..2eaf9c5be9633 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncSearchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncSearchAction.java @@ -13,6 +13,6 @@ public class GetAsyncSearchAction extends ActionType { public static final String NAME = "indices:data/read/async_search/get"; private GetAsyncSearchAction() { - super(NAME, AsyncSearchResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncStatusAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncStatusAction.java index 85d43a73ada94..4c6b65a837f97 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncStatusAction.java @@ -13,6 +13,6 @@ public class GetAsyncStatusAction extends ActionType { public static final String NAME = "cluster:monitor/async_search/status"; private GetAsyncStatusAction() { - super(NAME, AsyncStatusResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchAction.java index f1b75a27b92a3..3c616a0fece1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchAction.java @@ -13,6 +13,6 @@ public final class SubmitAsyncSearchAction extends ActionType RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION = ActionType.localOnly( + public static final ActionType RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION = new ActionType<>( "cluster:admin/xpack/security/remote_cluster_credentials/reload" ); + + public static final ActionType QUERY_USER_ACTION = new ActionType<>("cluster:admin/xpack/security/user/query"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ClearSecurityCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ClearSecurityCacheAction.java index 5c7bdb39dc49e..7726ba2596364 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ClearSecurityCacheAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ClearSecurityCacheAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public class ClearSecurityCacheAction extends ActionType { @@ -16,6 +15,6 @@ public class ClearSecurityCacheAction extends ActionType { public static final CreateApiKeyAction INSTANCE = new CreateApiKeyAction(); private CreateApiKeyAction() { - super(NAME, CreateApiKeyResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyAction.java index d5bd7f4e6c02e..8abe5251d0209 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyAction.java @@ -18,7 +18,7 @@ public final class CreateCrossClusterApiKeyAction extends ActionType { public static final GetApiKeyAction INSTANCE = new GetApiKeyAction(); private GetApiKeyAction() { - super(NAME, GetApiKeyResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java index 71e0c98fb0012..a8b14795e2dd8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java @@ -26,7 +26,7 @@ */ public final class GetApiKeyRequest extends ActionRequest { - static TransportVersion 
API_KEY_ACTIVE_ONLY_PARAM_TRANSPORT_VERSION = TransportVersions.V_8_500_061; + static TransportVersion API_KEY_ACTIVE_ONLY_PARAM_TRANSPORT_VERSION = TransportVersions.V_8_10_X; private final String realmName; private final String userName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyAction.java index 1f9f05ca07e49..c24c9625a5a95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyAction.java @@ -19,7 +19,7 @@ public final class GrantApiKeyAction extends ActionType { public static final GrantApiKeyAction INSTANCE = new GrantApiKeyAction(); private GrantApiKeyAction() { - super(NAME, CreateApiKeyResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/InvalidateApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/InvalidateApiKeyAction.java index 2ca54a3b416b0..90d582ad867de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/InvalidateApiKeyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/InvalidateApiKeyAction.java @@ -18,6 +18,6 @@ public final class InvalidateApiKeyAction extends ActionType { public static final QueryApiKeyAction INSTANCE = new QueryApiKeyAction(); private QueryApiKeyAction() { - super(NAME, QueryApiKeyResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java index 
e7eefaeb3a525..6910eab73ea90 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -125,19 +126,6 @@ public ActionRequestValidationException validate() { @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalNamedWriteable(queryBuilder); - out.writeOptionalVInt(from); - out.writeOptionalVInt(size); - if (fieldSortBuilders == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - out.writeCollection(fieldSortBuilders); - } - out.writeOptionalWriteable(searchAfterBuilder); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_5_0)) { - out.writeBoolean(withLimitedBy); - } + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java index c8771a1604b03..80b4ca0cb1e9d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java @@ -8,10 +8,8 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -19,24 +17,18 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; +import java.util.List; import java.util.Objects; /** * Response for search API keys.
    * The result contains information about the API keys that were found. */ -public final class QueryApiKeyResponse extends ActionResponse implements ToXContentObject, Writeable { +public final class QueryApiKeyResponse extends ActionResponse implements ToXContentObject { private final long total; private final Item[] items; - public QueryApiKeyResponse(StreamInput in) throws IOException { - super(in); - this.total = in.readLong(); - this.items = in.readArray(Item::new, Item[]::new); - } - public QueryApiKeyResponse(long total, Collection items) { this.total = total; Objects.requireNonNull(items, "items must be provided"); @@ -44,7 +36,7 @@ public QueryApiKeyResponse(long total, Collection items) { } public static QueryApiKeyResponse emptyResponse() { - return new QueryApiKeyResponse(0, Collections.emptyList()); + return new QueryApiKeyResponse(0, List.of()); } public long getTotal() { @@ -67,8 +59,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public void writeTo(StreamOutput out) throws IOException { - out.writeLong(total); - out.writeArray(items); + TransportAction.localOnly(); } @Override @@ -91,7 +82,7 @@ public String toString() { return "QueryApiKeyResponse{" + "total=" + total + ", items=" + Arrays.toString(items) + '}'; } - public static class Item implements ToXContentObject, Writeable { + public static class Item implements ToXContentObject { private final ApiKey apiKey; @Nullable private final Object[] sortValues; @@ -101,11 +92,6 @@ public Item(ApiKey apiKey, @Nullable Object[] sortValues) { this.sortValues = sortValues; } - public Item(StreamInput in) throws IOException { - this.apiKey = new ApiKey(in); - this.sortValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new); - } - public ApiKey getApiKey() { return apiKey; } @@ -114,12 +100,6 @@ public Object[] getSortValues() { return sortValues; } - @Override - public void writeTo(StreamOutput out) throws IOException { - apiKey.writeTo(out); - 
out.writeOptionalArray(Lucene::writeSortValue, sortValues); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyAction.java index 9cacc909b14ea..c42f7a12bea87 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyAction.java @@ -15,6 +15,6 @@ public final class UpdateApiKeyAction extends ActionType { public static final UpdateApiKeyAction INSTANCE = new UpdateApiKeyAction(); private UpdateApiKeyAction() { - super(NAME, UpdateApiKeyResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyAction.java index 9cfda77b3b5f1..e83e67b8fdfff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyAction.java @@ -15,6 +15,6 @@ public final class UpdateCrossClusterApiKeyAction extends ActionType { @@ -16,6 +15,6 @@ public class ClearPrivilegesCacheAction extends ActionType public static final String NAME = "cluster:admin/xpack/security/privilege/get"; private GetPrivilegesAction() { - super(NAME, GetPrivilegesResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java index deb14567b9048..0380380040739 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java @@ -17,6 +17,6 @@ public final class PutPrivilegesAction extends ActionType public static final String NAME = "cluster:admin/xpack/security/privilege/put"; private PutPrivilegesAction() { - super(NAME, PutPrivilegesResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileAction.java index 92fd74bd70538..28b177d912a49 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileAction.java @@ -15,6 +15,6 @@ public class ActivateProfileAction extends ActionType { public static final ActivateProfileAction INSTANCE = new ActivateProfileAction(); public ActivateProfileAction() { - super(NAME, ActivateProfileResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesAction.java index 2ba86a1559588..c35693e17d1fe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesAction.java @@ -15,6 +15,6 @@ public class GetProfilesAction extends ActionType { 
public static final GetProfilesAction INSTANCE = new GetProfilesAction(); public GetProfilesAction() { - super(NAME, GetProfilesResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SetProfileEnabledAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SetProfileEnabledAction.java index c6a61b7e2838a..369147cc5dc4f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SetProfileEnabledAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SetProfileEnabledAction.java @@ -16,6 +16,6 @@ public class SetProfileEnabledAction extends ActionType { public static final SetProfileEnabledAction INSTANCE = new SetProfileEnabledAction(); public SetProfileEnabledAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesAction.java index f8e4d31f9d5cf..bafa85f79c618 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesAction.java @@ -15,6 +15,6 @@ public class SuggestProfilesAction extends ActionType { public static final SuggestProfilesAction INSTANCE = new SuggestProfilesAction(); public SuggestProfilesAction() { - super(NAME, SuggestProfilesResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataAction.java index 
c9dd8eaada068..c6a25b97a8c8c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataAction.java @@ -16,6 +16,6 @@ public class UpdateProfileDataAction extends ActionType { public static final UpdateProfileDataAction INSTANCE = new UpdateProfileDataAction(); public UpdateProfileDataAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java index 443babe3fdee7..acd1c94f20fea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.realm; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public class ClearRealmCacheAction extends ActionType { @@ -15,6 +14,6 @@ public class ClearRealmCacheAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/realm/cache/clear"; protected ClearRealmCacheAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java index cef2665df87be..a772e948b93d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.role; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; /** * The action for clearing the cache used by native roles that are stored in an index. @@ -18,6 +17,6 @@ public class ClearRolesCacheAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/roles/cache/clear"; protected ClearRolesCacheAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java index 2b4ad261ccae2..70c8378f227ae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java @@ -17,6 +17,6 @@ public class DeleteRoleAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/role/delete"; protected DeleteRoleAction() { - super(NAME, DeleteRoleResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java index 543b1485b035f..ac48571d8717e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java @@ -17,6 +17,6 @@ public class GetRolesAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/role/get"; protected 
GetRolesAction() { - super(NAME, GetRolesResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java index 548e7e043b1bc..015b9be849764 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java @@ -17,6 +17,6 @@ public class PutRoleAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/role/put"; protected PutRoleAction() { - super(NAME, PutRoleResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java index a470f36f905a6..77e4efdb3363b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java @@ -18,6 +18,6 @@ public class DeleteRoleMappingAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/role_mapping/get"; private GetRoleMappingsAction() { - super(NAME, GetRoleMappingsResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java index d3bdb9c01637a..c876df5972317 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java @@ -17,6 +17,6 @@ public class PutRoleMappingAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/role_mapping/put"; private PutRoleMappingAction() { - super(NAME, PutRoleMappingResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java index bde668e7a1724..e0be9d6b097f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java @@ -17,6 +17,6 @@ public final class SamlAuthenticateAction extends ActionType { public static final SamlLogoutAction INSTANCE = new SamlLogoutAction(); private SamlLogoutAction() { - super(NAME, SamlLogoutResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java index 0eda263da44ab..a93bf2dd50cd7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java @@ -17,6 +17,6 @@ public final class SamlPrepareAuthenticationAction extends ActionType { public static final SamlSpMetadataAction INSTANCE = new SamlSpMetadataAction(); private SamlSpMetadataAction() { - super(NAME, SamlSpMetadataResponse::new); + super(NAME); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/CreateServiceAccountTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/CreateServiceAccountTokenAction.java index 4771196c04fde..8b5cebf718bc7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/CreateServiceAccountTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/CreateServiceAccountTokenAction.java @@ -15,6 +15,6 @@ public class CreateServiceAccountTokenAction extends ActionType { @@ -16,6 +15,6 @@ public class GetServiceAccountNodesCredentialsAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java index 88bc63a3a78f8..20feb0faf5033 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java @@ -45,7 +45,7 @@ public class UpdateSecuritySettingsAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java index e468b4e1033f7..5828abdcf7ea8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java @@ -17,6 +17,6 @@ public final class CreateTokenAction extends ActionType { public static final CreateTokenAction INSTANCE = new 
CreateTokenAction(); private CreateTokenAction() { - super(NAME, CreateTokenResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java index 2736138574b5d..6172a6cbd8e33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java @@ -17,6 +17,6 @@ public final class InvalidateTokenAction extends ActionType { public static final RefreshTokenAction INSTANCE = new RefreshTokenAction(); private RefreshTokenAction() { - super(NAME, CreateTokenResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java index e5d6dc3922363..7c530f79d4f52 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java @@ -14,6 +14,6 @@ public class AuthenticateAction extends ActionType { public static final AuthenticateAction INSTANCE = new AuthenticateAction(); public AuthenticateAction() { - super(NAME, AuthenticateResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java index 73ee4d1f27299..dfad1fe376706 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java @@ -20,7 +20,7 @@ public class AuthenticateResponse extends ActionResponse implements ToXContent { - public static final TransportVersion VERSION_OPERATOR_FIELD = TransportVersions.V_8_500_061; + public static final TransportVersion VERSION_OPERATOR_FIELD = TransportVersions.V_8_10_X; private final Authentication authentication; private final boolean operator; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java index 657e377c1b208..538a0ab0269c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java @@ -17,6 +17,6 @@ public class DeleteUserAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/user/delete"; protected DeleteUserAction() { - super(NAME, DeleteUserResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java index b6a93a1e3f07f..6d0faf89e7bd3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java @@ -17,6 +17,6 @@ public final class GetUserPrivilegesAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/user/get"; protected GetUsersAction() { - super(NAME, GetUsersResponse::new); + super(NAME); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java index cc060b4cddf88..9453d9f769b47 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.security.action.user; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; /** @@ -17,8 +18,12 @@ public class HasPrivilegesAction extends ActionType { public static final HasPrivilegesAction INSTANCE = new HasPrivilegesAction(); public static final String NAME = "cluster:admin/xpack/security/user/has_privileges"; + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + HasPrivilegesResponse::new + ); private HasPrivilegesAction() { - super(NAME, HasPrivilegesResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesAction.java index 186bf4966c48f..2a65cc85b6d00 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesAction.java @@ -15,6 +15,6 @@ public class ProfileHasPrivilegesAction extends ActionType { public static final String NAME = "cluster:admin/xpack/security/user/put"; protected PutUserAction() { - super(NAME, PutUserResponse::new); + super(NAME); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java new file mode 100644 index 0000000000000..6db7e93b66eda --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.user; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Request for the query Users API.
    + * Model for API requests to the query users API + */ +public final class QueryUserRequest extends ActionRequest { + + @Nullable + private final QueryBuilder queryBuilder; + @Nullable + private final Integer from; + @Nullable + private final Integer size; + @Nullable + private final List fieldSortBuilders; + @Nullable + private final SearchAfterBuilder searchAfterBuilder; + + public QueryUserRequest() { + this(null); + } + + public QueryUserRequest(QueryBuilder queryBuilder) { + this(queryBuilder, null, null, null, null); + } + + public QueryUserRequest( + @Nullable QueryBuilder queryBuilder, + @Nullable Integer from, + @Nullable Integer size, + @Nullable List fieldSortBuilders, + @Nullable SearchAfterBuilder searchAfterBuilder + ) { + this.queryBuilder = queryBuilder; + this.from = from; + this.size = size; + this.fieldSortBuilders = fieldSortBuilders; + this.searchAfterBuilder = searchAfterBuilder; + } + + public QueryBuilder getQueryBuilder() { + return queryBuilder; + } + + public Integer getFrom() { + return from; + } + + public Integer getSize() { + return size; + } + + public List getFieldSortBuilders() { + return fieldSortBuilders; + } + + public SearchAfterBuilder getSearchAfterBuilder() { + return searchAfterBuilder; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (from != null && from < 0) { + validationException = addValidationError("[from] parameter cannot be negative but was [" + from + "]", validationException); + } + if (size != null && size < 0) { + validationException = addValidationError("[size] parameter cannot be negative but was [" + size + "]", validationException); + } + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java new file mode 100644 index 0000000000000..57d156cf05ca0 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.user; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.security.user.User; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Objects; + +/** + * Response for the query Users API.
    + * Model used to serialize information about the Users that were found. + */ +public final class QueryUserResponse extends ActionResponse implements ToXContentObject { + + private final long total; + private final Item[] items; + + public QueryUserResponse(long total, Collection items) { + this.total = total; + Objects.requireNonNull(items, "items must be provided"); + this.items = items.toArray(new Item[0]); + } + + public static QueryUserResponse emptyResponse() { + return new QueryUserResponse(0, Collections.emptyList()); + } + + public long getTotal() { + return total; + } + + public Item[] getItems() { + return items; + } + + public int getCount() { + return items.length; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject().field("total", total).field("count", items.length).array("users", (Object[]) items); + return builder.endObject(); + } + + @Override + public String toString() { + return "QueryUsersResponse{" + "total=" + total + ", items=" + Arrays.toString(items) + '}'; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } + + public record Item(User user, @Nullable Object[] sortValues) implements ToXContentObject { + + @Override + public Object[] sortValues() { + return sortValues; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + user.innerToXContent(builder); + if (sortValues != null && sortValues.length > 0) { + builder.array("_sort", sortValues); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return "Item{" + "user=" + user + ", sortValues=" + Arrays.toString(sortValues) + '}'; + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index f39eca877432c..2857cbfd1bdd2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -52,7 +52,7 @@ */ public class RoleDescriptor implements ToXContentObject, Writeable { - public static final TransportVersion WORKFLOWS_RESTRICTION_VERSION = TransportVersions.V_8_500_020; + public static final TransportVersion WORKFLOWS_RESTRICTION_VERSION = TransportVersions.V_8_9_X; public static final String ROLE_TYPE = "role"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index f93599cdb98cc..dd2baca058102 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction; import org.elasticsearch.xpack.core.ilm.action.GetStatusAction; import org.elasticsearch.xpack.core.ilm.action.ILMActions; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyAction; @@ -234,6 +235,7 @@ public class ClusterPrivilegeResolver { GetServiceAccountAction.NAME, GetServiceAccountCredentialsAction.NAME + "*", GetUsersAction.NAME, + ActionTypes.QUERY_USER_ACTION.name(), GetUserPrivilegesAction.NAME, // normally authorized under the 
"same-user" authz check, but added here for uniformity HasPrivilegesAction.NAME, GetSecuritySettingsAction.NAME @@ -324,6 +326,16 @@ public class ClusterPrivilegeResolver { CROSS_CLUSTER_REPLICATION_PATTERN ); + public static final NamedClusterPrivilege READ_CONNECTOR_SECRETS = new ActionClusterPrivilege( + "read_connector_secrets", + Set.of("cluster:admin/xpack/connector/secret/get") + ); + + public static final NamedClusterPrivilege WRITE_CONNECTOR_SECRETS = new ActionClusterPrivilege( + "write_connector_secrets", + Set.of("cluster:admin/xpack/connector/secret/post", "cluster:admin/xpack/connector/secret/delete") + ); + private static final Map VALUES = sortByAccessLevel( Stream.of( NONE, @@ -378,7 +390,9 @@ public class ClusterPrivilegeResolver { POST_BEHAVIORAL_ANALYTICS_EVENT, MANAGE_SEARCH_QUERY_RULES, CROSS_CLUSTER_SEARCH, - CROSS_CLUSTER_REPLICATION + CROSS_CLUSTER_REPLICATION, + READ_CONNECTOR_SECRETS, + WRITE_CONNECTOR_SECRETS ).filter(Objects::nonNull).toList() ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java index 461da66233f4b..17a23f6b66b5b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.slm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -22,7 +21,7 @@ public class DeleteSnapshotLifecycleAction extends ActionType { @@ -44,11 +43,6 @@ public String getLifecycleId() { return this.lifecycleId; } - @Override - public 
ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java index 848cca8d9d951..8a8ecf3a747a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.slm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -29,7 +28,7 @@ public class ExecuteSnapshotLifecycleAction extends ActionType implements ToXContentObject { @@ -57,11 +56,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(lifecycleId); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java index 2111e35f6cb46..9574ba7fff685 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.slm.action; -import 
org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -22,7 +21,7 @@ public class ExecuteSnapshotRetentionAction extends ActionType implements ToXContentObject { @@ -33,11 +32,6 @@ public Request(StreamInput in) throws IOException { super(in); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSLMStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSLMStatusAction.java index 3008e8f2763d1..c7e108b8fec3b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSLMStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSLMStatusAction.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.core.slm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; @@ -24,7 +22,7 @@ public class GetSLMStatusAction extends ActionType public static final String NAME = "cluster:admin/slm/status"; protected GetSLMStatusAction() { - super(NAME, GetSLMStatusAction.Response::new); + super(NAME); } public static class Response extends ActionResponse implements ToXContentObject { @@ -57,23 +55,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } } - 
- public static class Request extends AcknowledgedRequest { - - public Request(StreamInput in) throws IOException { - super(in); - } - - public Request() {} - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java index 1a95d649f2616..50bf1d1eaf9ea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.slm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -28,7 +27,7 @@ public class GetSnapshotLifecycleAction extends ActionType { @@ -52,11 +51,6 @@ public String[] getLifecycleIds() { return this.lifecycleIds; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java index 8c69de784cb20..6279c4208b878 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java @@ -7,10 +7,8 @@ 
package org.elasticsearch.xpack.core.slm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,26 +29,7 @@ public class GetSnapshotLifecycleStatsAction extends ActionType { - - public Request() {} - - public Request(StreamInput in) throws IOException { - super(in); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } + super(NAME); } public static class Response extends ActionResponse implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java index 8b4410caa86f1..8dadc11cd26ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java @@ -27,7 +27,7 @@ public class PutSnapshotLifecycleAction extends ActionType public static final String NAME = "cluster:admin/slm/put"; protected PutSnapshotLifecycleAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java index 162ba0369506d..d6deb7bda384f 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.slm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -20,7 +19,7 @@ public class StartSLMAction extends ActionType { public static final String NAME = "cluster:admin/slm/start"; protected StartSLMAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -31,11 +30,6 @@ public Request(StreamInput in) throws IOException { public Request() {} - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return 86; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java index 7b8d2b9687858..60be1b99cde8d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.slm.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -20,7 +19,7 @@ public class StopSLMAction extends ActionType { public static final String NAME = "cluster:admin/slm/stop"; protected StopSLMAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class 
Request extends AcknowledgedRequest { @@ -31,11 +30,6 @@ public Request(StreamInput in) throws IOException { public Request() {} - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return 85; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/action/SpatialStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/action/SpatialStatsAction.java index ceee29d173d9b..117d613a20cd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/action/SpatialStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/action/SpatialStatsAction.java @@ -33,7 +33,7 @@ public class SpatialStatsAction extends ActionType public static final String NAME = "cluster:monitor/xpack/spatial/stats"; private SpatialStatsAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java index 985bab5a0d1d9..cbb747272eebc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java @@ -32,7 +32,7 @@ public class GetCertificateInfoAction extends ActionType { public static final TermsEnumAction INSTANCE = new TermsEnumAction(); public static final String NAME = "indices:data/read/xpack/termsenum/list"; + public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + NAME, + TermsEnumResponse::new + ); static final ParseField INDEX_FILTER = new ParseField("index_filter"); static final ParseField TIMEOUT = new ParseField("timeout"); private TermsEnumAction() { - super(NAME, 
TermsEnumResponse::new); + super(NAME); } public static TermsEnumRequest fromXContent(XContentParser parser, String... indices) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java index a6e9ac228fe9f..61fc666829eea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java @@ -15,12 +15,10 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -35,7 +33,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; @@ -64,6 +61,7 @@ import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; 
+import org.elasticsearch.transport.Transports; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; @@ -102,7 +100,8 @@ public class TransportTermsEnumAction extends HandledTransportAction listener) { + // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can + coordinationExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, l))); + } + + private void doExecuteForked(Task task, TermsEnumRequest request, ActionListener listener) { if (ccsCheckCompatibility) { checkCCSVersionCompatibility(request); } @@ -188,6 +198,7 @@ private static NodeTermsEnumResponse readShardResponse(StreamInput in) throws IO } protected Map> getNodeBundles(ClusterState clusterState, String[] concreteIndices) { + assert Transports.assertNotTransportThread("O(#shards) work is too much for transport threads"); // Group targeted shards by nodeId Map> fastNodeBundles = new HashMap<>(); for (String indexName : concreteIndices) { @@ -231,6 +242,7 @@ private static TermsEnumResponse mergeResponses( boolean complete, Map> nodeBundles ) { + assert Transports.assertNotTransportThread("O(#shards) work is too much for transport threads"); int successfulShards = 0; int failedShards = 0; List shardFailures = null; @@ -601,8 +613,8 @@ public NodeTermsEnumResponse read(StreamInput in) throws IOException { } @Override - public Executor executor(ThreadPool threadPool) { - return TransportResponseHandler.TRANSPORT_WORKER; + public Executor executor() { + return coordinationExecutor; } @Override @@ -627,12 +639,8 @@ void performRemoteClusterOperation(final String clusterAlias, final OriginalIndi try { TermsEnumRequest req = new TermsEnumRequest(request).indices(remoteIndices.indices()); - Client remoteClient = remoteClusterService.getRemoteClusterClient( - transportService.getThreadPool(), - 
clusterAlias, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); - remoteClient.execute(TermsEnumAction.INSTANCE, req, new ActionListener<>() { + var remoteClient = remoteClusterService.getRemoteClusterClient(clusterAlias, coordinationExecutor); + remoteClient.execute(TermsEnumAction.REMOTE_TYPE, req, new ActionListener<>() { @Override public void onResponse(TermsEnumResponse termsEnumResponse) { onRemoteClusterResponse( @@ -738,10 +746,8 @@ private void asyncNodeOperation(NodeTermsEnumRequest request, ActionListener dataNodeOperation(request))); + final Executor executor = ex.getQueue().size() == 0 ? ex : shardExecutor; + executor.execute(ActionRunnable.supply(listener, () -> dataNodeOperation(request))); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java index e760471a6f1c2..98bdff8cbced7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java @@ -38,7 +38,7 @@ public class FindStructureAction extends ActionType INSTANCE = new ActionType<>( + "cluster:monitor/text_structure/test_grok_pattern" + ); + + public static class Request extends ActionRequest { + + public static final ParseField GROK_PATTERN = new ParseField("grok_pattern"); + public static final ParseField TEXT = new ParseField("text"); + public static final ParseField ECS_COMPATIBILITY = new ParseField("ecs_compatibility"); + + private static final ObjectParser PARSER = createParser(); + + private static ObjectParser createParser() { + ObjectParser parser = new ObjectParser<>("textstructure/testgrokpattern", false, Request.Builder::new); + parser.declareString(Request.Builder::grokPattern, GROK_PATTERN); + parser.declareStringArray(Request.Builder::text, 
TEXT); + return parser; + } + + public static class Builder { + private String grokPattern; + private List text; + private String ecsCompatibility; + + public Builder grokPattern(String grokPattern) { + this.grokPattern = grokPattern; + return this; + } + + public Builder text(List text) { + this.text = text; + return this; + } + + public Builder ecsCompatibility(String ecsCompatibility) { + this.ecsCompatibility = Strings.isNullOrEmpty(ecsCompatibility) ? null : ecsCompatibility; + return this; + } + + public Request build() { + return new Request(grokPattern, text, ecsCompatibility); + } + } + + private final String grokPattern; + private final List text; + private final String ecsCompatibility; + + private Request(String grokPattern, List text, String ecsCompatibility) { + this.grokPattern = ExceptionsHelper.requireNonNull(grokPattern, GROK_PATTERN.getPreferredName()); + this.text = ExceptionsHelper.requireNonNull(text, TEXT.getPreferredName()); + this.ecsCompatibility = ecsCompatibility; + } + + public static Request parseRequest(String ecsCompatibility, XContentParser parser) throws IOException { + return PARSER.parse(parser, null).ecsCompatibility(ecsCompatibility).build(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } + + public String getGrokPattern() { + return grokPattern; + } + + public List getText() { + return text; + } + + public String getEcsCompatibility() { + return ecsCompatibility; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(grokPattern, request.grokPattern) && Objects.equals(text, request.text); + } + + @Override + public int hashCode() { + return Objects.hash(grokPattern, text); + } + + @Override + public String toString() { + return 
"Request{" + "grokPattern='" + grokPattern + '\'' + ", text=" + text + '}'; + } + } + + public static class Response extends ActionResponse implements ToXContentObject, Writeable { + + private final List> ranges; + + public Response(List> ranges) { + this.ranges = ranges; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray("matches"); + for (Map ranges : ranges) { + builder.startObject(); + builder.field("matched", ranges != null); + if (ranges != null) { + builder.startObject("fields"); + for (Map.Entry rangeOrList : ranges.entrySet()) { + List listOfRanges; + if (rangeOrList.getValue() instanceof List) { + listOfRanges = (List) rangeOrList.getValue(); + } else { + listOfRanges = List.of(rangeOrList.getValue()); + } + builder.startArray(rangeOrList.getKey()); + for (Object rangeObject : listOfRanges) { + GrokCaptureExtracter.Range range = (GrokCaptureExtracter.Range) rangeObject; + builder.startObject(); + builder.field("match", range.match()); + builder.field("offset", range.offset()); + builder.field("length", range.length()); + builder.endObject(); + } + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java index 6f7cc7cf1eea6..ef61187757445 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java @@ -7,7 +7,6 @@ package 
org.elasticsearch.xpack.core.transform.action; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -26,7 +25,7 @@ public class DeleteTransformAction extends ActionType { public static final String NAME = "cluster:admin/transform/delete"; private DeleteTransformAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -74,11 +73,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java index e492a98748af2..f1b11daf9e732 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -40,9 +41,10 @@ public class GetCheckpointAction extends ActionType REMOTE_TYPE = new RemoteClusterActionType<>(NAME, Response::new); private GetCheckpointAction() { - super(NAME, 
GetCheckpointAction.Response::new); + super(NAME); } public static class Request extends ActionRequest implements IndicesRequest.Replaceable { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java index 8e67dbc6daacd..908810cacf2ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java @@ -39,7 +39,7 @@ public class GetCheckpointNodeAction extends ActionType private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GetTransformAction.class); private GetTransformAction() { - super(NAME, GetTransformAction.Response::new); + super(NAME); } public static class Request extends AbstractGetResourcesRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index 46e844f93695e..b7259f9bd8d60 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -46,7 +46,7 @@ public class GetTransformStatsAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java index 98018589e73a4..67c5e22902cf2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java @@ -52,7 +52,7 @@ public class PreviewTransformAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java index 5f278db31d5ef..b9f186ec10833 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java @@ -35,7 +35,7 @@ public class PutTransformAction extends ActionType { private static final TimeValue MAX_FREQUENCY = TimeValue.timeValueHours(1); private PutTransformAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java index 61edebe804d1a..9d77ffdc0c218 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.transform.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -26,7 +25,7 @@ public class ResetTransformAction extends ActionType { public static final ResetTransformAction INSTANCE = new ResetTransformAction(); private ResetTransformAction() { - 
super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { @@ -61,11 +60,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(force); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java index 6a50bd40517e1..ebb17ee0d90ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java @@ -35,7 +35,7 @@ public class ScheduleNowTransformAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/SetResetModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/SetResetModeAction.java index 8b4b4d438d4e8..8fcca8ddcb821 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/SetResetModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/SetResetModeAction.java @@ -15,7 +15,7 @@ public class SetResetModeAction extends ActionType { public static final String NAME = "cluster:internal/xpack/transform/reset_mode"; private SetResetModeAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java index 
e5d11e8bc4c47..268098c092b0c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.transform.action; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; @@ -31,7 +30,7 @@ public class StartTransformAction extends ActionType { @@ -72,11 +71,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java index 794bf009764f3..6c7f1be2c3231 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java @@ -43,7 +43,7 @@ public class StopTransformAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java index b2a764b0be5b0..6a2394463f4b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java @@ -40,7 +40,7 @@ public class UpdateTransformAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java index d5449519d2627..b7dea916accbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.transform.action; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -28,7 +27,7 @@ public class UpgradeTransformsAction extends ActionType { @@ -45,11 +44,6 @@ public Request(boolean dryRun, TimeValue timeout) { this.dryRun = dryRun; } - @Override - public ActionRequestValidationException validate() { - return null; - } - public boolean isDryRun() { return dryRun; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java index 34d737a4745db..10e086335825f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java @@ -27,7 +27,7 @@ public class ValidateTransformAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/QueryWatchesAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/QueryWatchesAction.java index 70ffdc98831f8..31c27cbe3d470 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/QueryWatchesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/QueryWatchesAction.java @@ -41,7 +41,7 @@ public class QueryWatchesAction extends ActionType public static final String NAME = "cluster:monitor/xpack/watcher/watch/query"; private QueryWatchesAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java index 5cba85377df98..4b147a6e799e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java @@ -17,6 +17,6 @@ public class AckWatchAction extends ActionType { public static final String NAME = "cluster:admin/xpack/watcher/watch/ack"; private AckWatchAction() { - super(NAME, AckWatchResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java index 6e1add022e4b3..d18ab74c33942 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java 
@@ -17,6 +17,6 @@ public class ActivateWatchAction extends ActionType { public static final String NAME = "cluster:admin/xpack/watcher/watch/activate"; private ActivateWatchAction() { - super(NAME, ActivateWatchResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java index 4d9cf9f6a896a..4003939f064a6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java @@ -18,6 +18,6 @@ public class DeleteWatchAction extends ActionType { public static final String NAME = "cluster:admin/xpack/watcher/watch/delete"; private DeleteWatchAction() { - super(NAME, DeleteWatchResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java index bb5f92f20d7a8..7c6258b77d475 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java @@ -18,6 +18,6 @@ public class ExecuteWatchAction extends ActionType { public static final String NAME = "cluster:admin/xpack/watcher/watch/execute"; private ExecuteWatchAction() { - super(NAME, ExecuteWatchResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java index 004c9eece9b95..c464c37b1a3a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java @@ -17,6 +17,6 @@ public class GetWatchAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/watcher/watch/get"; private GetWatchAction() { - super(NAME, GetWatchResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java index cc3339d68d81a..576bd220853ce 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java @@ -25,7 +25,7 @@ public class GetWatcherSettingsAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java index d8b255d433132..7235577c7cdac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java @@ -18,6 +18,6 @@ public class PutWatchAction extends ActionType { public static final String NAME = "cluster:admin/xpack/watcher/watch/put"; private PutWatchAction() { - super(NAME, PutWatchResponse::new); + super(NAME); } } 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java index f8d0ade06e022..29f4db51e146e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java @@ -32,7 +32,7 @@ public class UpdateWatcherSettingsAction extends ActionType { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java index 80af58d655dda..0a5bf62c73dc7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java @@ -15,6 +15,6 @@ public class WatcherServiceAction extends ActionType { public static final String NAME = "cluster:admin/xpack/watcher/service"; private WatcherServiceAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java index 5e102ad446087..1cbbf35101eb2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java @@ -7,7 
+7,6 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.stats; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; /** * This ActionType gets the stats for the watcher plugin @@ -18,6 +17,6 @@ public class WatcherStatsAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/watcher/stats/dist"; private WatcherStatsAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/RemoteClusterLicenseCheckerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/RemoteClusterLicenseCheckerTests.java index 983666ebb852a..99fe20fb220c5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/RemoteClusterLicenseCheckerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/RemoteClusterLicenseCheckerTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; @@ -146,12 +147,13 @@ public void testCheckRemoteClusterLicensesGivenCompatibleLicenses() { final ThreadPool threadPool = createMockThreadPool(); final Client client = createMockClient(threadPool); + final RemoteClusterClient remoteClient = client.getRemoteClusterClient("", Runnable::run); doAnswer(invocationMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; listener.onResponse(responses.get(index.getAndIncrement())); return null; - }).when(client).execute(same(XPackInfoAction.INSTANCE), any(), any()); + }).when(remoteClient).execute(same(XPackInfoAction.REMOTE_TYPE), any(), any()); final List 
remoteClusterAliases = Arrays.asList("valid1", "valid2", "valid3"); responses.add(new XPackInfoResponse(null, createPlatinumLicenseResponse(), null)); @@ -179,7 +181,7 @@ public void onFailure(final Exception e) { }) ); - verify(client, times(3)).execute(same(XPackInfoAction.INSTANCE), any(), any()); + verify(remoteClient, times(3)).execute(same(XPackInfoAction.REMOTE_TYPE), any(), any()); assertNotNull(licenseCheck.get()); assertTrue(licenseCheck.get().isSuccess()); } @@ -194,12 +196,13 @@ public void testCheckRemoteClusterLicensesGivenIncompatibleLicense() { final ThreadPool threadPool = createMockThreadPool(); final Client client = createMockClient(threadPool); + final RemoteClusterClient remoteClient = client.getRemoteClusterClient("", Runnable::run); doAnswer(invocationMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; listener.onResponse(responses.get(index.getAndIncrement())); return null; - }).when(client).execute(same(XPackInfoAction.INSTANCE), any(), any()); + }).when(remoteClient).execute(same(XPackInfoAction.REMOTE_TYPE), any(), any()); LicensedFeature.Momentary feature = LicensedFeature.momentary(null, "feature", License.OperationMode.PLATINUM); final RemoteClusterLicenseChecker licenseChecker = new RemoteClusterLicenseChecker(client, feature); @@ -222,7 +225,7 @@ public void onFailure(final Exception e) { }) ); - verify(client, times(2)).execute(same(XPackInfoAction.INSTANCE), any(), any()); + verify(remoteClient, times(2)).execute(eq(XPackInfoAction.REMOTE_TYPE), any(), any()); assertNotNull(licenseCheck.get()); assertFalse(licenseCheck.get().isSuccess()); assertThat(licenseCheck.get().remoteClusterLicenseInfo().clusterAlias(), equalTo("cluster-with-basic-license")); @@ -237,12 +240,13 @@ public void testCheckRemoteClusterLicencesGivenNonExistentCluster() { final String failingClusterAlias = randomFrom(remoteClusterAliases); final ThreadPool threadPool = createMockThreadPool(); final 
Client client = createMockClientThatThrowsOnGetRemoteClusterClient(threadPool, failingClusterAlias); + final RemoteClusterClient remoteClient = client.getRemoteClusterClient("", Runnable::run); doAnswer(invocationMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; listener.onResponse(responses.get(index.getAndIncrement())); return null; - }).when(client).execute(same(XPackInfoAction.INSTANCE), any(), any()); + }).when(remoteClient).execute(same(XPackInfoAction.REMOTE_TYPE), any(), any()); responses.add(new XPackInfoResponse(null, createPlatinumLicenseResponse(), null)); responses.add(new XPackInfoResponse(null, createPlatinumLicenseResponse(), null)); @@ -281,13 +285,14 @@ public void testRemoteClusterLicenseCallUsesSystemContext() throws InterruptedEx try { final Client client = createMockClient(threadPool); + final RemoteClusterClient remoteClient = client.getRemoteClusterClient("", Runnable::run); doAnswer(invocationMock -> { assertTrue(threadPool.getThreadContext().isSystemContext()); @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; listener.onResponse(new XPackInfoResponse(null, createPlatinumLicenseResponse(), null)); return null; - }).when(client).execute(same(XPackInfoAction.INSTANCE), any(), any()); + }).when(remoteClient).execute(eq(XPackInfoAction.REMOTE_TYPE), any(), any()); LicensedFeature.Momentary feature = LicensedFeature.momentary(null, "feature", License.OperationMode.PLATINUM); final RemoteClusterLicenseChecker licenseChecker = new RemoteClusterLicenseChecker(client, feature); @@ -295,7 +300,7 @@ public void testRemoteClusterLicenseCallUsesSystemContext() throws InterruptedEx final List remoteClusterAliases = Collections.singletonList("valid"); licenseChecker.checkRemoteClusterLicenses(remoteClusterAliases, doubleInvocationProtectingListener(ActionListener.noop())); - verify(client, 
times(1)).execute(same(XPackInfoAction.INSTANCE), any(), any()); + verify(remoteClient, times(1)).execute(eq(XPackInfoAction.REMOTE_TYPE), any(), any()); } finally { terminate(threadPool); } @@ -316,12 +321,13 @@ public void testListenerIsExecutedWithCallingContext() throws InterruptedExcepti } else { client = createMockClient(threadPool); } + final RemoteClusterClient remoteClient = client.getRemoteClusterClient("", Runnable::run); doAnswer(invocationMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; listener.onResponse(responses.get(index.getAndIncrement())); return null; - }).when(client).execute(same(XPackInfoAction.INSTANCE), any(), any()); + }).when(remoteClient).execute(same(XPackInfoAction.REMOTE_TYPE), any(), any()); responses.add(new XPackInfoResponse(null, createPlatinumLicenseResponse(), null)); responses.add(new XPackInfoResponse(null, createPlatinumLicenseResponse(), null)); @@ -405,12 +411,13 @@ public void testBuildErrorMessageForInactiveLicense() { public void testCheckRemoteClusterLicencesNoLicenseMetadata() { final ThreadPool threadPool = createMockThreadPool(); final Client client = createMockClient(threadPool); + final RemoteClusterClient remoteClient = client.getRemoteClusterClient("", Runnable::run); doAnswer(invocationMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; listener.onResponse(new XPackInfoResponse(null, null, null)); return null; - }).when(client).execute(same(XPackInfoAction.INSTANCE), any(), any()); + }).when(remoteClient).execute(same(XPackInfoAction.REMOTE_TYPE), any(), any()); LicensedFeature.Momentary feature = LicensedFeature.momentary(null, "feature", License.OperationMode.PLATINUM); final RemoteClusterLicenseChecker licenseChecker = new RemoteClusterLicenseChecker(client, feature); @@ -470,13 +477,15 @@ private ThreadPool createMockThreadPool() { } private Client createMockClient(final 
ThreadPool threadPool) { - return createMockClient(threadPool, client -> when(client.getRemoteClusterClient(anyString(), any())).thenReturn(client)); + final var remoteClient = mock(RemoteClusterClient.class); + return createMockClient(threadPool, client -> when(client.getRemoteClusterClient(anyString(), any())).thenReturn(remoteClient)); } private Client createMockClientThatThrowsOnGetRemoteClusterClient(final ThreadPool threadPool, final String clusterAlias) { + final var remoteClient = mock(RemoteClusterClient.class); return createMockClient(threadPool, client -> { when(client.getRemoteClusterClient(eq(clusterAlias), any())).thenThrow(new IllegalArgumentException()); - when(client.getRemoteClusterClient(argThat(a -> not(clusterAlias).matches(a)), any())).thenReturn(client); + when(client.getRemoteClusterClient(argThat(a -> not(clusterAlias).matches(a)), any())).thenReturn(remoteClient); }); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index d7abbe7dbefb4..b906226c7de4f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; @@ -90,16 +89,17 @@ import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; + 
public class SourceOnlySnapshotShardTests extends IndexShardTestCase { public void testSourceIncomplete() throws IOException { - ShardRouting shardRouting = TestShardRouting.newShardRouting( + ShardRouting shardRouting = shardRoutingBuilder( new ShardId("index", "_na_", 0), randomAlphaOfLength(10), true, - ShardRoutingState.INITIALIZING, - RecoverySource.EmptyStoreRecoverySource.INSTANCE - ); + ShardRoutingState.INITIALIZING + ).withRecoverySource(RecoverySource.EmptyStoreRecoverySource.INSTANCE).build(); Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) @@ -351,18 +351,19 @@ public void onFailure(Exception e) { assertEquals(copy.getStage(), IndexShardSnapshotStatus.Stage.DONE); } shard.refresh("test"); - ShardRouting shardRouting = TestShardRouting.newShardRouting( + ShardRouting shardRouting = shardRoutingBuilder( new ShardId("index", "_na_", 0), randomAlphaOfLength(10), true, - ShardRoutingState.INITIALIZING, + ShardRoutingState.INITIALIZING + ).withRecoverySource( new RecoverySource.SnapshotRecoverySource( UUIDs.randomBase64UUID(), new Snapshot("src_only", snapshotId), IndexVersion.current(), indexId ) - ); + ).build(); IndexMetadata metadata = runAsSnapshot( threadPool, () -> repository.getSnapshotIndexMetaData( @@ -445,13 +446,12 @@ public void onFailure(Exception e) { } public IndexShard reindex(DirectoryReader reader, MappingMetadata mapping) throws IOException { - ShardRouting targetShardRouting = TestShardRouting.newShardRouting( + ShardRouting targetShardRouting = shardRoutingBuilder( new ShardId("target", "_na_", 0), randomAlphaOfLength(10), true, - ShardRoutingState.INITIALIZING, - RecoverySource.EmptyStoreRecoverySource.INSTANCE - ); + ShardRoutingState.INITIALIZING + ).withRecoverySource(RecoverySource.EmptyStoreRecoverySource.INSTANCE).build(); Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, 
IndexVersion.current()) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java index d1ef79f1d61b4..7134ceba475fe 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java @@ -9,7 +9,6 @@ import joptsimple.internal.Strings; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -896,8 +895,7 @@ private static DesiredNode newDesiredNode(String externalId, DiscoveryNodeRole.. .build(), 1, ByteSizeValue.ONE, - ByteSizeValue.ONE, - Version.CURRENT + ByteSizeValue.ONE ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index a383004c12878..78c62d45177b4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -83,6 +83,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestHeaderDefinition; +import org.elasticsearch.rest.RestInterceptor; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.snapshots.Snapshot; @@ -232,6 +233,7 @@ public List getActionFilters() { @Override public List getRestHandlers( Settings settings, + 
NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -242,6 +244,7 @@ public List getRestHandlers( List handlers = new ArrayList<>( super.getRestHandlers( settings, + namedWriteableRegistry, restController, clusterSettings, indexScopedSettings, @@ -254,6 +257,7 @@ public List getRestHandlers( p -> handlers.addAll( p.getRestHandlers( settings, + namedWriteableRegistry, restController, clusterSettings, indexScopedSettings, @@ -381,10 +385,9 @@ public List getBootstrapChecks() { } @Override - public UnaryOperator getRestHandlerInterceptor(ThreadContext threadContext) { - + public RestInterceptor getRestHandlerInterceptor(ThreadContext threadContext) { // There can be only one. - List> items = filterPlugins(ActionPlugin.class).stream() + List items = filterPlugins(ActionPlugin.class).stream() .filter(RestServerActionPlugin.class::isInstance) .map(RestServerActionPlugin.class::cast) .map(p -> p.getRestHandlerInterceptor(threadContext)) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java index b457a77e38f01..7595ad7299c3b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.license.License; @@ -48,9 +49,9 @@ public void testDoExecute() throws Exception { LicenseService licenseService = mock(LicenseService.class); NodeClient client = 
mock(NodeClient.class); - Map featureSets = new HashMap<>(); + Map, FeatureSet> featureSets = new HashMap<>(); int featureSetCount = randomIntBetween(0, 5); - for (XPackInfoFeatureAction infoAction : randomSubsetOf(featureSetCount, XPackInfoFeatureAction.ALL)) { + for (ActionType infoAction : randomSubsetOf(featureSetCount, XPackInfoFeatureAction.ALL)) { FeatureSet featureSet = new FeatureSet(randomAlphaOfLength(5), randomBoolean(), randomBoolean()); featureSets.put(infoAction, featureSet); when(client.executeLocally(eq(infoAction), any(ActionRequest.class), any(ActionListener.class))).thenAnswer(answer -> { @@ -68,7 +69,7 @@ public void testDoExecute() throws Exception { client ) { @Override - protected List infoActions() { + protected List> infoActions() { return new ArrayList<>(featureSets.keySet()); } }; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidatorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidatorTests.java index a9643ce099262..9eefad61f943a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidatorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/validation/SourceDestValidatorTests.java @@ -14,6 +14,8 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RedirectToLocalClusterRemoteClusterClient; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetadata; @@ -162,8 +164,8 @@ private class MockClientLicenseCheck extends NoOpClient { } @Override - public Client getRemoteClusterClient(String clusterAlias, Executor responseExecutor) { - return this; + public 
RemoteClusterClient getRemoteClusterClient(String clusterAlias, Executor responseExecutor) { + return new RedirectToLocalClusterRemoteClusterClient(this); } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java index de02919b668b6..06a3c0da856aa 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java @@ -30,6 +30,8 @@ import java.util.Collections; import java.util.Map; +import static org.elasticsearch.cluster.routing.TestShardRouting.buildUnassignedInfo; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.xpack.core.ilm.step.info.AllocationInfo.allShardsActiveAllocationInfo; import static org.elasticsearch.xpack.core.ilm.step.info.AllocationInfo.waitingForActiveShardsAllocationInfo; @@ -414,14 +416,9 @@ public void testExecuteAllocateUnassigned() throws Exception { IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index) .addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED)) .addShard( - TestShardRouting.newShardRouting( - new ShardId(index, 1), - null, - null, - true, - ShardRoutingState.UNASSIGNED, - TestShardRouting.randomUnassignedInfo("the shard is intentionally unassigned") - ) + shardRoutingBuilder(new ShardId(index, 1), null, true, ShardRoutingState.UNASSIGNED).withUnassignedInfo( + buildUnassignedInfo("the shard is intentionally unassigned") + ).build() ); logger.info( @@ -472,14 +469,9 @@ public void testExecuteReplicasNotAllocatedOnSingleNode() { IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index) .addShard(TestShardRouting.newShardRouting(new 
ShardId(index, 0), "node1", true, ShardRoutingState.STARTED)) .addShard( - TestShardRouting.newShardRouting( - new ShardId(index, 0), - null, - null, - false, - ShardRoutingState.UNASSIGNED, + shardRoutingBuilder(new ShardId(index, 0), null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo( new UnassignedInfo(Reason.REPLICA_ADDED, "no attempt") - ) + ).build() ); AllocationRoutedStep step = createRandomInstance(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java index ca69ec874fd33..68e1bfb117701 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java @@ -33,6 +33,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.hamcrest.Matchers.containsString; public class CheckShrinkReadyStepTests extends AbstractStepTestCase { @@ -300,14 +301,9 @@ public void testExecuteAllocateReplicaUnassigned() { IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index) .addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED)) .addShard( - TestShardRouting.newShardRouting( - new ShardId(index, 0), - null, - null, - false, - ShardRoutingState.UNASSIGNED, + shardRoutingBuilder(new ShardId(index, 0), null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo( randomUnassignedInfo("the shard is intentionally unassigned") - ) + ).build() ); CheckShrinkReadyStep step = createRandomInstance(); @@ -358,14 +354,9 @@ public void testExecuteReplicasNotAllocatedOnSingleNode() { .addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node1", false, ShardRoutingState.STARTED)) 
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node2", true, ShardRoutingState.STARTED)) .addShard( - TestShardRouting.newShardRouting( - new ShardId(index, 0), - null, - null, - false, - ShardRoutingState.UNASSIGNED, + shardRoutingBuilder(new ShardId(index, 0), null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo( new UnassignedInfo(UnassignedInfo.Reason.REPLICA_ADDED, "no attempt") - ) + ).build() ); CheckShrinkReadyStep step = createRandomInstance(); @@ -399,14 +390,9 @@ public void testExecuteReplicasButCopiesNotPresent() { .addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node2", false, ShardRoutingState.STARTED)) .addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node3", true, ShardRoutingState.STARTED)) .addShard( - TestShardRouting.newShardRouting( - new ShardId(index, 0), - null, - null, - false, - ShardRoutingState.UNASSIGNED, + shardRoutingBuilder(new ShardId(index, 0), null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo( new UnassignedInfo(UnassignedInfo.Reason.REPLICA_ADDED, "no attempt") - ) + ).build() ); CheckShrinkReadyStep step = createRandomInstance(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DataTierMigrationRoutedStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DataTierMigrationRoutedStepTests.java index 17115a79d4ec6..8b05a3156ed04 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DataTierMigrationRoutedStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DataTierMigrationRoutedStepTests.java @@ -29,6 +29,7 @@ import java.util.Collections; import java.util.Set; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.cluster.routing.allocation.DataTier.TIER_PREFERENCE; import static org.elasticsearch.xpack.core.ilm.CheckShrinkReadyStepTests.randomUnassignedInfo; import static 
org.elasticsearch.xpack.core.ilm.step.info.AllocationInfo.waitingForActiveShardsAllocationInfo; @@ -74,14 +75,9 @@ public void testExecuteWithUnassignedShard() { IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index) .addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED)) .addShard( - TestShardRouting.newShardRouting( - new ShardId(index, 1), - null, - null, - true, - ShardRoutingState.UNASSIGNED, + shardRoutingBuilder(new ShardId(index, 1), null, true, ShardRoutingState.UNASSIGNED).withUnassignedInfo( randomUnassignedInfo("the shard is intentionally unassigned") - ) + ).build() ); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java index 962e789cac7d6..b16983c6a7ac6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java @@ -10,9 +10,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -70,13 +70,13 @@ public void testPerformActionComplete() throws Exception { Step.StepKey stepKey = randomStepKey(); StepKey nextStepKey = randomStepKey(); int maxNumSegments = randomIntBetween(1, 
10); - ForceMergeResponse forceMergeResponse = Mockito.mock(ForceMergeResponse.class); + BroadcastResponse forceMergeResponse = Mockito.mock(BroadcastResponse.class); Mockito.when(forceMergeResponse.getStatus()).thenReturn(RestStatus.OK); Mockito.doAnswer(invocationOnMock -> { ForceMergeRequest request = (ForceMergeRequest) invocationOnMock.getArguments()[0]; assertThat(request.maxNumSegments(), equalTo(maxNumSegments)); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; listener.onResponse(forceMergeResponse); return null; }).when(indicesClient).forceMerge(any(), any()); @@ -95,7 +95,7 @@ public void testPerformActionThrowsException() { Step.StepKey stepKey = randomStepKey(); StepKey nextStepKey = randomStepKey(); int maxNumSegments = randomIntBetween(1, 10); - ForceMergeResponse forceMergeResponse = Mockito.mock(ForceMergeResponse.class); + BroadcastResponse forceMergeResponse = Mockito.mock(BroadcastResponse.class); Mockito.when(forceMergeResponse.getStatus()).thenReturn(RestStatus.OK); Mockito.doAnswer(invocationOnMock -> { ForceMergeRequest request = (ForceMergeRequest) invocationOnMock.getArguments()[0]; @@ -103,7 +103,7 @@ public void testPerformActionThrowsException() { assertThat(request.indices()[0], equalTo(indexMetadata.getIndex().getName())); assertThat(request.maxNumSegments(), equalTo(maxNumSegments)); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; listener.onFailure(exception); return null; }).when(indicesClient).forceMerge(any(), any()); @@ -126,7 +126,7 @@ public void testForcemergeFailsOnSomeShards() { .numberOfReplicas(randomIntBetween(0, 5)) .build(); Index index = indexMetadata.getIndex(); - ForceMergeResponse forceMergeResponse = 
Mockito.mock(ForceMergeResponse.class); + BroadcastResponse forceMergeResponse = Mockito.mock(BroadcastResponse.class); Mockito.when(forceMergeResponse.getTotalShards()).thenReturn(numberOfShards); Mockito.when(forceMergeResponse.getFailedShards()).thenReturn(numberOfShards - 1); Mockito.when(forceMergeResponse.getStatus()).thenReturn(RestStatus.BAD_REQUEST); @@ -143,7 +143,7 @@ public void testForcemergeFailsOnSomeShards() { Mockito.doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; listener.onResponse(forceMergeResponse); return null; }).when(indicesClient).forceMerge(any(), any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java index 92ba5d2ad4efb..db8ac28dd1b98 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.core.ilm; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; -import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetadata; @@ -100,7 +100,7 @@ public void testPerformAction() throws Exception { Mockito.doAnswer(invocation -> { ResizeRequest request = (ResizeRequest) invocation.getArguments()[0]; @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) 
invocation.getArguments()[1]; + ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertThat(request.getSourceIndex(), equalTo(sourceIndexMetadata.getIndex().getName())); assertThat(request.getTargetIndexRequest().aliases(), equalTo(Collections.emptySet())); @@ -119,7 +119,7 @@ public void testPerformAction() throws Exception { ); } request.setMaxPrimaryShardSize(step.getMaxPrimaryShardSize()); - listener.onResponse(new ResizeResponse(true, true, sourceIndexMetadata.getIndex().getName())); + listener.onResponse(new CreateIndexResponse(true, true, sourceIndexMetadata.getIndex().getName())); return null; }).when(indicesClient).resizeIndex(Mockito.any(), Mockito.any()); @@ -181,8 +181,8 @@ public void testPerformActionIsCompleteForUnAckedRequests() throws Exception { Mockito.doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(new ResizeResponse(false, false, indexMetadata.getIndex().getName())); + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new CreateIndexResponse(false, false, indexMetadata.getIndex().getName())); return null; }).when(indicesClient).resizeIndex(Mockito.any(), Mockito.any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java index a369219bd7c3c..6d85e90dc3108 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java @@ -53,7 +53,7 @@ protected Writeable.Reader instanceReader() { @Override protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { - if (version.before(TransportVersions.V_8_500_020)) { + if 
(version.before(TransportVersions.V_8_9_X)) { instance.setRefreshRequired(true); } return instance; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java index ee304f966c9b4..7f37ff85f1fda 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java @@ -72,7 +72,7 @@ protected Writeable.Reader instanceReader() { @Override protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { - if (version.before(TransportVersions.V_8_500_061)) { + if (version.before(TransportVersions.V_8_10_X)) { return new Request( instance.getModelId(), instance.getDefinition(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigUtilsTests.java similarity index 75% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigUtilsTests.java index 23b64aa8be431..4cf9270a1b243 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigUtilsTests.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.core.ml.datafeed.extractor; +package org.elasticsearch.xpack.core.ml.datafeed; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class ExtractorUtilsTests extends ESTestCase { +public class DatafeedConfigUtilsTests extends ESTestCase { public void testGetHistogramAggregation_DateHistogramHasSibling() { AvgAggregationBuilder avg = AggregationBuilders.avg("avg"); @@ -31,7 +31,7 @@ public void testGetHistogramAggregation_DateHistogramHasSibling() { ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> ExtractorUtils.getHistogramAggregation( + () -> DatafeedConfigUtils.getHistogramAggregation( new AggregatorFactories.Builder().addAggregator(avg).addAggregator(dateHistogram).getAggregatorFactories() ) ); @@ -42,7 +42,9 @@ public void testGetHistogramAggregation_DateHistogramHasSibling() { terms.subAggregation(avg); e = expectThrows( ElasticsearchException.class, - () -> ExtractorUtils.getHistogramAggregation(new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories()) + () -> DatafeedConfigUtils.getHistogramAggregation( + new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories() + ) ); assertEquals("The date_histogram (or histogram) aggregation cannot have sibling aggregations", e.getMessage()); } @@ -52,20 +54,20 @@ public void testGetHistogramAggregation() { TermsAggregationBuilder nestedTerms = AggregationBuilders.terms("nested_terms"); DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("time"); - AggregationBuilder histogramAggregationBuilder = ExtractorUtils.getHistogramAggregation( + AggregationBuilder histogramAggregationBuilder = DatafeedConfigUtils.getHistogramAggregation( new 
AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories() ); assertEquals(dateHistogram, histogramAggregationBuilder); dateHistogram.subAggregation(avg).subAggregation(nestedTerms); - histogramAggregationBuilder = ExtractorUtils.getHistogramAggregation( + histogramAggregationBuilder = DatafeedConfigUtils.getHistogramAggregation( new AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories() ); assertEquals(dateHistogram, histogramAggregationBuilder); TermsAggregationBuilder toplevelTerms = AggregationBuilders.terms("top_level"); toplevelTerms.subAggregation(dateHistogram); - histogramAggregationBuilder = ExtractorUtils.getHistogramAggregation( + histogramAggregationBuilder = DatafeedConfigUtils.getHistogramAggregation( new AggregatorFactories.Builder().addAggregator(toplevelTerms).getAggregatorFactories() ); @@ -76,7 +78,9 @@ public void testGetHistogramAggregation_MissingHistogramAgg() { TermsAggregationBuilder terms = AggregationBuilders.terms("top_level"); ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> ExtractorUtils.getHistogramAggregation(new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories()) + () -> DatafeedConfigUtils.getHistogramAggregation( + new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories() + ) ); assertEquals("A date_histogram (or histogram) aggregation is required", e.getMessage()); } @@ -90,7 +94,7 @@ public void testGetHistogramIntervalMillis_GivenDateHistogramWithInvalidTimeZone .subAggregation(maxTime); ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> ExtractorUtils.getHistogramIntervalMillis(dateHistogram) + () -> DatafeedConfigUtils.getHistogramIntervalMillis(dateHistogram) ); assertThat(e.getMessage(), equalTo("ML requires date_histogram.time_zone to be UTC")); @@ -104,7 +108,7 @@ public void testGetHistogramIntervalMillis_GivenUtcTimeZonesDeprecated() { .fixedInterval(new 
DateHistogramInterval(300000L + "ms")) .timeZone(zone) .subAggregation(maxTime); - assertThat(ExtractorUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L)); + assertThat(DatafeedConfigUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L)); } public void testGetHistogramIntervalMillis_GivenUtcTimeZones() { @@ -115,22 +119,22 @@ public void testGetHistogramIntervalMillis_GivenUtcTimeZones() { .fixedInterval(new DateHistogramInterval("300000ms")) .timeZone(zone) .subAggregation(maxTime); - assertThat(ExtractorUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L)); + assertThat(DatafeedConfigUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L)); } public void testIsHistogram() { - assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.dateHistogram("time"))); - assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.histogram("time"))); - assertFalse(ExtractorUtils.isHistogram(AggregationBuilders.max("time"))); + assertTrue(DatafeedConfigUtils.isHistogram(AggregationBuilders.dateHistogram("time"))); + assertTrue(DatafeedConfigUtils.isHistogram(AggregationBuilders.histogram("time"))); + assertFalse(DatafeedConfigUtils.isHistogram(AggregationBuilders.max("time"))); } public void testValidateAndGetCalendarInterval() { - assertEquals(300 * 1000L, ExtractorUtils.validateAndGetCalendarInterval("5m")); - assertEquals(7200 * 1000L, ExtractorUtils.validateAndGetCalendarInterval("2h")); - assertEquals(86400L * 1000L, ExtractorUtils.validateAndGetCalendarInterval("1d")); + assertEquals(300 * 1000L, DatafeedConfigUtils.validateAndGetCalendarInterval("5m")); + assertEquals(7200 * 1000L, DatafeedConfigUtils.validateAndGetCalendarInterval("2h")); + assertEquals(86400L * 1000L, DatafeedConfigUtils.validateAndGetCalendarInterval("1d")); } public void testValidateAndGetCalendarInterval_intervalIsLongerThanAWeek() { - expectThrows(ElasticsearchException.class, () -> ExtractorUtils.validateAndGetCalendarInterval("8d")); + 
expectThrows(ElasticsearchException.class, () -> DatafeedConfigUtils.validateAndGetCalendarInterval("8d")); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java index 3ddfcfe76ce25..368823d0f64af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java @@ -6,13 +6,14 @@ */ package org.elasticsearch.xpack.core.ml.dataframe.evaluation; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filters; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.Cardinality; -import org.elasticsearch.search.aggregations.metrics.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; +import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.metrics.InternalCardinality; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import java.util.Collections; import java.util.List; @@ -23,72 +24,72 @@ public final class MockAggregations { - public static Terms mockTerms(String name) { + public static StringTerms mockTerms(String name) { return mockTerms(name, Collections.emptyList(), 0); } - public static Terms 
mockTerms(String name, List buckets, long sumOfOtherDocCounts) { - Terms agg = mock(Terms.class); + public static StringTerms mockTerms(String name, List buckets, long sumOfOtherDocCounts) { + StringTerms agg = mock(StringTerms.class); when(agg.getName()).thenReturn(name); doReturn(buckets).when(agg).getBuckets(); when(agg.getSumOfOtherDocCounts()).thenReturn(sumOfOtherDocCounts); return agg; } - public static Terms.Bucket mockTermsBucket(String key, Aggregations subAggs) { - Terms.Bucket bucket = mock(Terms.Bucket.class); + public static StringTerms.Bucket mockTermsBucket(String key, InternalAggregations subAggs) { + StringTerms.Bucket bucket = mock(StringTerms.Bucket.class); when(bucket.getKeyAsString()).thenReturn(key); when(bucket.getAggregations()).thenReturn(subAggs); return bucket; } - public static Filters mockFilters(String name) { + public static InternalFilters mockFilters(String name) { return mockFilters(name, Collections.emptyList()); } - public static Filters mockFilters(String name, List buckets) { - Filters agg = mock(Filters.class); + public static InternalFilters mockFilters(String name, List buckets) { + InternalFilters agg = mock(InternalFilters.class); when(agg.getName()).thenReturn(name); doReturn(buckets).when(agg).getBuckets(); return agg; } - public static Filters.Bucket mockFiltersBucket(String key, long docCount, Aggregations subAggs) { - Filters.Bucket bucket = mockFiltersBucket(key, docCount); + public static InternalFilters.InternalBucket mockFiltersBucket(String key, long docCount, InternalAggregations subAggs) { + InternalFilters.InternalBucket bucket = mockFiltersBucket(key, docCount); when(bucket.getAggregations()).thenReturn(subAggs); return bucket; } - public static Filters.Bucket mockFiltersBucket(String key, long docCount) { - Filters.Bucket bucket = mock(Filters.Bucket.class); + public static InternalFilters.InternalBucket mockFiltersBucket(String key, long docCount) { + InternalFilters.InternalBucket bucket = 
mock(InternalFilters.InternalBucket.class); when(bucket.getKeyAsString()).thenReturn(key); when(bucket.getDocCount()).thenReturn(docCount); return bucket; } - public static Filter mockFilter(String name, long docCount) { - Filter agg = mock(Filter.class); + public static InternalFilter mockFilter(String name, long docCount) { + InternalFilter agg = mock(InternalFilter.class); when(agg.getName()).thenReturn(name); when(agg.getDocCount()).thenReturn(docCount); return agg; } - public static NumericMetricsAggregation.SingleValue mockSingleValue(String name, double value) { - NumericMetricsAggregation.SingleValue agg = mock(NumericMetricsAggregation.SingleValue.class); + public static InternalNumericMetricsAggregation.SingleValue mockSingleValue(String name, double value) { + InternalNumericMetricsAggregation.SingleValue agg = mock(InternalNumericMetricsAggregation.SingleValue.class); when(agg.getName()).thenReturn(name); when(agg.value()).thenReturn(value); return agg; } - public static Cardinality mockCardinality(String name, long value) { - Cardinality agg = mock(Cardinality.class); + public static InternalCardinality mockCardinality(String name, long value) { + InternalCardinality agg = mock(InternalCardinality.class); when(agg.getName()).thenReturn(name); when(agg.getValue()).thenReturn(value); return agg; } - public static ExtendedStats mockExtendedStats(String name, double variance, long count) { - ExtendedStats agg = mock(ExtendedStats.class); + public static InternalExtendedStats mockExtendedStats(String name, double variance, long count) { + InternalExtendedStats agg = mock(InternalExtendedStats.class); when(agg.getName()).thenReturn(name); when(agg.getVariance()).thenReturn(variance); when(agg.getCount()).thenReturn(count); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java index 18663b77d9ca0..3bf1ff171e422 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationFields; @@ -64,11 +64,14 @@ public static Accuracy createRandom() { } public void testProcess() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( List.of( mockTerms( "accuracy_" + MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, - List.of(mockTermsBucket("dog", new Aggregations(List.of())), mockTermsBucket("cat", new Aggregations(List.of()))), + List.of( + mockTermsBucket("dog", InternalAggregations.from(List.of())), + mockTermsBucket("cat", InternalAggregations.from(List.of())) + ), 100L ), mockCardinality("accuracy_" + MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 1000L), @@ -78,7 +81,7 @@ public void testProcess() { mockFiltersBucket( "dog", 30, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -94,7 +97,7 @@ public void testProcess() { mockFiltersBucket( "cat", 70, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -125,11 +128,14 @@ public void 
testProcess() { } public void testProcess_GivenCardinalityTooHigh() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( List.of( mockTerms( "accuracy_" + MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, - List.of(mockTermsBucket("dog", new Aggregations(List.of())), mockTermsBucket("cat", new Aggregations(List.of()))), + List.of( + mockTermsBucket("dog", InternalAggregations.from(List.of())), + mockTermsBucket("cat", InternalAggregations.from(List.of())) + ), 100L ), mockCardinality("accuracy_" + MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 1001L), @@ -139,7 +145,7 @@ public void testProcess_GivenCardinalityTooHigh() { mockFiltersBucket( "dog", 30, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -155,7 +161,7 @@ public void testProcess_GivenCardinalityTooHigh() { mockFiltersBucket( "cat", 70, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java index cc101626667b2..b797961e58b33 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import 
org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.AbstractXContentSerializingTestCase; @@ -65,7 +65,7 @@ protected NamedXContentRegistry xContentRegistry() { public static Classification createRandom() { List metrics = randomSubsetOf( Arrays.asList( - AccuracyTests.createRandom(), + AccuracyTests.createRandom(), AucRocTests.createRandom(), PrecisionTests.createRandom(), RecallTests.createRandom(), @@ -341,7 +341,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (result != null) { return; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java index cb3455b3d5b2e..e8e71b8721c26 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -101,11 +101,14 @@ public void testAggs() { } public void testProcess() { -
InternalAggregations aggs = InternalAggregations.from( List.of( mockTerms( MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, - List.of(mockTermsBucket("dog", new Aggregations(List.of())), mockTermsBucket("cat", new Aggregations(List.of()))), + List.of( + mockTermsBucket("dog", InternalAggregations.from(List.of())), + mockTermsBucket("cat", InternalAggregations.from(List.of())) + ), 0L ), mockCardinality(MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 2L), @@ -115,7 +118,7 @@ public void testProcess() { mockFiltersBucket( "dog", 30, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -131,7 +134,7 @@ public void testProcess() { mockFiltersBucket( "cat", 70, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -168,11 +171,14 @@ public void testProcess() { } public void testProcess_OtherClassesCountGreaterThanZero() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( List.of( mockTerms( MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, - List.of(mockTermsBucket("dog", new Aggregations(List.of())), mockTermsBucket("cat", new Aggregations(List.of()))), + List.of( + mockTermsBucket("dog", InternalAggregations.from(List.of())), + mockTermsBucket("cat", InternalAggregations.from(List.of())) + ), 100L ), mockCardinality(MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 5L), @@ -182,7 +188,7 @@ public void testProcess_OtherClassesCountGreaterThanZero() { mockFiltersBucket( "dog", 30, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -198,7 +204,7 @@ public void testProcess_OtherClassesCountGreaterThanZero() { mockFiltersBucket( "cat", 85, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( 
MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -235,22 +241,22 @@ public void testProcess_OtherClassesCountGreaterThanZero() { } public void testProcess_MoreThanTwoStepsNeeded() { - Aggregations aggsStep1 = new Aggregations( + InternalAggregations aggsStep1 = InternalAggregations.from( List.of( mockTerms( MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, List.of( - mockTermsBucket("ant", new Aggregations(List.of())), - mockTermsBucket("cat", new Aggregations(List.of())), - mockTermsBucket("dog", new Aggregations(List.of())), - mockTermsBucket("fox", new Aggregations(List.of())) + mockTermsBucket("ant", InternalAggregations.from(List.of())), + mockTermsBucket("cat", InternalAggregations.from(List.of())), + mockTermsBucket("dog", InternalAggregations.from(List.of())), + mockTermsBucket("fox", InternalAggregations.from(List.of())) ), 0L ), mockCardinality(MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 2L) ) ); - Aggregations aggsStep2 = new Aggregations( + InternalAggregations aggsStep2 = InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_ACTUAL_CLASS, @@ -258,7 +264,7 @@ public void testProcess_MoreThanTwoStepsNeeded() { mockFiltersBucket( "ant", 46, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -276,7 +282,7 @@ public void testProcess_MoreThanTwoStepsNeeded() { mockFiltersBucket( "cat", 86, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -295,7 +301,7 @@ public void testProcess_MoreThanTwoStepsNeeded() { ) ) ); - Aggregations aggsStep3 = new Aggregations( + InternalAggregations aggsStep3 = InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_ACTUAL_CLASS, @@ -303,7 +309,7 @@ public void testProcess_MoreThanTwoStepsNeeded() { mockFiltersBucket( "dog", 126, - new 
Aggregations( + InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, @@ -321,7 +327,7 @@ public void testProcess_MoreThanTwoStepsNeeded() { mockFiltersBucket( "fox", 166, - new Aggregations( + InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java index d4261d81fea2c..f44efff28c034 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationFields; @@ -63,7 +63,7 @@ public static Precision createRandom() { } public void testProcess() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockTerms(Precision.ACTUAL_CLASSES_NAMES_AGG_NAME), mockFilters(Precision.BY_PREDICTED_CLASS_AGG_NAME), @@ -81,7 +81,7 @@ public void testProcess() { public void testProcess_GivenMissingAgg() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockFilters(Precision.BY_PREDICTED_CLASS_AGG_NAME), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); Precision 
precision = new Precision(); @@ -89,7 +89,7 @@ public void testProcess_GivenMissingAgg() { assertThat(precision.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue(Precision.AVG_PRECISION_AGG_NAME, 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377) @@ -103,7 +103,7 @@ public void testProcess_GivenMissingAgg() { public void testProcess_GivenAggOfWrongType() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockFilters(Precision.BY_PREDICTED_CLASS_AGG_NAME), mockFilters(Precision.AVG_PRECISION_AGG_NAME)) ); Precision precision = new Precision(); @@ -111,7 +111,7 @@ public void testProcess_GivenAggOfWrongType() { assertThat(precision.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue(Precision.BY_PREDICTED_CLASS_AGG_NAME, 1.0), mockSingleValue(Precision.AVG_PRECISION_AGG_NAME, 0.8123) @@ -124,7 +124,7 @@ public void testProcess_GivenAggOfWrongType() { } public void testProcess_GivenCardinalityTooHigh() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Collections.singletonList(mockTerms(Precision.ACTUAL_CLASSES_NAMES_AGG_NAME, Collections.emptyList(), 1)) ); Precision precision = new Precision(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java index 5f446083612df..8ba6e48082b71 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java @@ -8,7 
+8,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationFields; @@ -62,7 +62,7 @@ public static Recall createRandom() { } public void testProcess() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME), mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123), @@ -79,7 +79,7 @@ public void testProcess() { public void testProcess_GivenMissingAgg() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); Recall recall = new Recall(); @@ -87,7 +87,7 @@ public void testProcess_GivenMissingAgg() { assertThat(recall.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); Recall recall = new Recall(); @@ -98,7 +98,7 @@ public void testProcess_GivenMissingAgg() { public void testProcess_GivenAggOfWrongType() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME), mockTerms(Recall.AVG_RECALL_AGG_NAME)) ); Recall recall = new Recall(); @@ -106,7 +106,7 @@ public void testProcess_GivenAggOfWrongType() { assertThat(recall.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( 
Arrays.asList(mockSingleValue(Recall.BY_ACTUAL_CLASS_AGG_NAME, 1.0), mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123)) ); Recall recall = new Recall(); @@ -116,7 +116,7 @@ public void testProcess_GivenAggOfWrongType() { } public void testProcess_GivenCardinalityTooHigh() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME, Collections.emptyList(), 1), mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java index acbd647f7bfa2..1557bd71f98b5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -53,7 +53,7 @@ public static ConfusionMatrix createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockFilter("confusion_matrix_at_0.25_TP", 1L), mockFilter("confusion_matrix_at_0.25_FP", 2L), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java index 299aa76f05fde..bc198eaf3c7db 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -53,7 +53,7 @@ public static Precision createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockFilter("precision_at_0.25_TP", 1L), mockFilter("precision_at_0.25_FP", 4L), @@ -73,7 +73,9 @@ public void testEvaluate() { } public void testEvaluate_GivenZeroTpAndFp() { - Aggregations aggs = new Aggregations(Arrays.asList(mockFilter("precision_at_1.0_TP", 0L), mockFilter("precision_at_1.0_FP", 0L))); + InternalAggregations aggs = InternalAggregations.from( + Arrays.asList(mockFilter("precision_at_1.0_TP", 0L), mockFilter("precision_at_1.0_FP", 0L)) + ); Precision precision = new Precision(Arrays.asList(1.0)); EvaluationMetricResult result = precision.evaluate(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java index fb4ab46675eca..569b73417414e 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -53,7 +53,7 @@ public static Recall createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockFilter("recall_at_0.25_TP", 1L), mockFilter("recall_at_0.25_FN", 4L), @@ -73,7 +73,9 @@ public void testEvaluate() { } public void testEvaluate_GivenZeroTpAndFp() { - Aggregations aggs = new Aggregations(Arrays.asList(mockFilter("recall_at_1.0_TP", 0L), mockFilter("recall_at_1.0_FN", 0L))); + InternalAggregations aggs = InternalAggregations.from( + Arrays.asList(mockFilter("recall_at_1.0_TP", 0L), mockFilter("recall_at_1.0_FN", 0L)) + ); Recall recall = new Recall(Arrays.asList(1.0)); EvaluationMetricResult result = recall.evaluate(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java index 8e7f4ddd36253..4a8485e8d138f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java @@ -8,7 +8,7 @@ import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -47,7 +47,7 @@ public static Huber createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("regression_huber", 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); @@ -60,7 +60,9 @@ public void testEvaluate() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377)) + ); Huber huber = new Huber((Double) null); huber.process(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java index c6c0d00dd240f..551a5f017c120 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -47,7 +47,7 @@ public static MeanSquaredError createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("regression_mse", 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); @@ -60,7 +60,9 @@ public void testEvaluate() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377)) + ); MeanSquaredError mse = new MeanSquaredError(); mse.process(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java index beb39e46fa5f1..d2bb30fb169b1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -47,7 +47,7 
@@ public static MeanSquaredLogarithmicError createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("regression_msle", 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); @@ -60,7 +60,9 @@ public void testEvaluate() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377)) + ); MeanSquaredLogarithmicError msle = new MeanSquaredLogarithmicError((Double) null); msle.process(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java index 644979379703c..710810d2d168e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -48,7 +48,7 @@ public static RSquared createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue("residual_sum_of_squares", 
10_111), mockExtendedStats("extended_stats_actual", 155.23, 1000), @@ -66,7 +66,7 @@ public void testEvaluate() { } public void testEvaluateWithZeroCount() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue("residual_sum_of_squares", 0), mockExtendedStats("extended_stats_actual", 0.0, 0), @@ -83,7 +83,7 @@ public void testEvaluateWithZeroCount() { } public void testEvaluateWithSingleCountZeroVariance() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue("residual_sum_of_squares", 1), mockExtendedStats("extended_stats_actual", 0.0, 1), @@ -100,7 +100,9 @@ public void testEvaluateWithSingleCountZeroVariance() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + (Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))) + ); RSquared rSquared = new RSquared(); rSquared.process(aggs); @@ -110,7 +112,7 @@ public void testEvaluate_GivenMissingAggs() { } public void testEvaluate_GivenMissingExtendedStatsAgg() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("some_other_single_metric_agg", 0.2377), mockSingleValue("residual_sum_of_squares", 0.2377)) ); @@ -122,7 +124,7 @@ public void testEvaluate_GivenMissingExtendedStatsAgg() { } public void testEvaluate_GivenMissingResidualSumOfSquaresAgg() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("some_other_single_metric_agg", 0.2377), mockExtendedStats("extended_stats_actual", 100, 50)) ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java index 5622914c2981c..c900db5646ba8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java @@ -460,7 +460,7 @@ protected TrainedModelConfig mutateInstanceForVersion(TrainedModelConfig instanc if (instance.getInferenceConfig() instanceof NlpConfig nlpConfig) { builder.setInferenceConfig(InferenceConfigItemTestCase.mutateForVersion(nlpConfig, version)); } - if (version.before(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + if (version.before(TransportVersions.V_8_11_X)) { builder.setPlatformArchitecture(null); } if (version.before(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java index 79c069afbd4ab..9253469ecc49d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java @@ -63,6 +63,13 @@ protected BertJapaneseTokenization mutateInstanceForVersion(BertJapaneseTokeniza return mutateForVersion(instance, version); } + public void testsBuildUpdatedTokenization() { + var update = new BertJapaneseTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20); + assertEquals(Tokenization.Truncate.NONE, update.getTruncate()); + assertEquals(50, update.maxSequenceLength()); + assertEquals(20, update.getSpan()); + } + public static BertJapaneseTokenization createRandom() { return new 
BertJapaneseTokenization( randomBoolean() ? null : randomBoolean(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java index a00ebec79a862..b9cda9a2068ea 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java @@ -63,6 +63,13 @@ protected BertTokenization mutateInstanceForVersion(BertTokenization instance, T return mutateForVersion(instance, version); } + public void testsBuildUpdatedTokenization() { + var update = new BertTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20); + assertEquals(Tokenization.Truncate.NONE, update.getTruncate()); + assertEquals(50, update.maxSequenceLength()); + assertEquals(20, update.getSpan()); + } + public static BertTokenization createRandom() { return new BertTokenization( randomBoolean() ? 
null : randomBoolean(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java index 1d52deaafa719..620036a040368 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java @@ -59,11 +59,11 @@ public void testFromMapWithUnknownField() { public void testApply() { ClassificationConfig originalConfig = randomClassificationConfig(); - assertThat(originalConfig, equalTo(ClassificationConfigUpdate.EMPTY_PARAMS.apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(ClassificationConfigUpdate.EMPTY_PARAMS))); assertThat( new ClassificationConfig.Builder(originalConfig).setNumTopClasses(5).build(), - equalTo(new ClassificationConfigUpdate.Builder().setNumTopClasses(5).build().apply(originalConfig)) + equalTo(originalConfig.apply(new ClassificationConfigUpdate.Builder().setNumTopClasses(5).build())) ); assertThat( new ClassificationConfig.Builder().setNumTopClasses(5) @@ -73,13 +73,14 @@ public void testApply() { .setTopClassesResultsField("bar") .build(), equalTo( - new ClassificationConfigUpdate.Builder().setNumTopClasses(5) - .setNumTopFeatureImportanceValues(1) - .setPredictionFieldType(PredictionFieldType.BOOLEAN) - .setResultsField("foo") - .setTopClassesResultsField("bar") - .build() - .apply(originalConfig) + originalConfig.apply( + new ClassificationConfigUpdate.Builder().setNumTopClasses(5) + .setNumTopFeatureImportanceValues(1) + .setPredictionFieldType(PredictionFieldType.BOOLEAN) + .setResultsField("foo") + .setTopClassesResultsField("bar") + .build() + ) ) ); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java index 40eb9a4afd35f..385f5b1ddbf83 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java @@ -62,39 +62,19 @@ FillMaskConfigUpdate fromMap(Map map) { return FillMaskConfigUpdate.fromMap(map); } - public void testIsNoop() { - assertTrue(new FillMaskConfigUpdate.Builder().build().isNoop(FillMaskConfigTests.createRandom())); - - assertFalse( - new FillMaskConfigUpdate.Builder().setResultsField("foo") - .build() - .isNoop(new FillMaskConfig.Builder().setResultsField("bar").build()) - ); - - assertFalse( - new FillMaskConfigUpdate.Builder().setTokenizationUpdate(new BertTokenizationUpdate(Tokenization.Truncate.SECOND, null)) - .build() - .isNoop(new FillMaskConfig.Builder().setResultsField("bar").build()) - ); - - assertTrue( - new FillMaskConfigUpdate.Builder().setNumTopClasses(3).build().isNoop(new FillMaskConfig.Builder().setNumTopClasses(3).build()) - ); - } - public void testApply() { FillMaskConfig originalConfig = FillMaskConfigTests.createRandom(); - assertThat(originalConfig, equalTo(new FillMaskConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new FillMaskConfigUpdate.Builder().build()))); assertThat( new FillMaskConfig.Builder(originalConfig).setResultsField("ml-results").build(), - equalTo(new FillMaskConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new FillMaskConfigUpdate.Builder().setResultsField("ml-results").build())) ); assertThat( new 
FillMaskConfig.Builder(originalConfig).setNumTopClasses(originalConfig.getNumTopClasses() + 1).build(), equalTo( - new FillMaskConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 1).build().apply(originalConfig) + originalConfig.apply(new FillMaskConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 1).build()) ) ); @@ -103,9 +83,11 @@ public void testApply() { assertThat( new FillMaskConfig.Builder(originalConfig).setTokenization(tokenization).build(), equalTo( - new FillMaskConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new FillMaskConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java index bf1d74f044c1e..825313213f24d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java @@ -125,7 +125,7 @@ protected ModelPackageConfig mutateInstance(ModelPackageConfig instance) { @Override protected ModelPackageConfig mutateInstanceForVersion(ModelPackageConfig instance, TransportVersion version) { var builder = new ModelPackageConfig.Builder(instance); - if (version.before(TransportVersions.ML_PACKAGE_LOADER_PLATFORM_ADDED)) { + if (version.before(TransportVersions.V_8_11_X)) { builder.setPlatformArchitecture(null); } if (version.before(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java index eb2afa501a4cc..72ba9fa5ba540 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java @@ -20,7 +20,6 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; public class NerConfigUpdateTests extends AbstractNlpConfigUpdateTestCase { @@ -61,7 +60,7 @@ NerConfigUpdate fromMap(Map map) { public void testApply() { NerConfig originalConfig = NerConfigTests.createRandom(); - assertThat(originalConfig, sameInstance(new NerConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new NerConfigUpdate.Builder().build()))); assertThat( new NerConfig( @@ -70,7 +69,7 @@ public void testApply() { originalConfig.getClassificationLabels(), "ml-results" ), - equalTo(new NerConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new NerConfigUpdate.Builder().setResultsField("ml-results").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -83,9 +82,11 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new NerConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new 
NerConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java index 9cbf73dfe4809..caec28a93e5a3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java @@ -20,7 +20,6 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; public class PassThroughConfigUpdateTests extends AbstractNlpConfigUpdateTestCase { @@ -61,11 +60,11 @@ PassThroughConfigUpdate fromMap(Map map) { public void testApply() { PassThroughConfig originalConfig = PassThroughConfigTests.createRandom(); - assertThat(originalConfig, sameInstance(new PassThroughConfigUpdate.Builder().build().apply(originalConfig))); + assertEquals(originalConfig, originalConfig.apply(new PassThroughConfigUpdate.Builder().build())); assertThat( new PassThroughConfig(originalConfig.getVocabularyConfig(), originalConfig.getTokenization(), "ml-results"), - equalTo(new PassThroughConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new PassThroughConfigUpdate.Builder().setResultsField("ml-results").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -73,9 +72,11 @@ public void testApply() { 
assertThat( new PassThroughConfig(originalConfig.getVocabularyConfig(), tokenization, originalConfig.getResultsField()), equalTo( - new PassThroughConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new PassThroughConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java index 46f11e7c5f793..e787b770b5da5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java @@ -122,11 +122,12 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") - .setNumTopClasses(4) - .setMaxAnswerLength(40) - .build() - .apply(originalConfig) + originalConfig.apply( + new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") + .setNumTopClasses(4) + .setMaxAnswerLength(40) + .build() + ) ) ); assertThat( @@ -139,10 +140,9 @@ public void testApply() { "updated-field" ), equalTo( - new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") - .setResultsField("updated-field") - .build() - .apply(originalConfig) + originalConfig.apply( + new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?").setResultsField("updated-field").build() + ) ) ); @@ -158,10 +158,11 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new 
QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") - .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) - .build() - .apply(originalConfig) + originalConfig.apply( + new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") + .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) + .build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java index 4c60ca7f885c4..35d2cb7fda16f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java @@ -53,19 +53,18 @@ public void testFromMapWithUnknownField() { public void testApply() { RegressionConfig originalConfig = randomRegressionConfig(); - assertThat(originalConfig, equalTo(RegressionConfigUpdate.EMPTY_PARAMS.apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(RegressionConfigUpdate.EMPTY_PARAMS))); assertThat( new RegressionConfig.Builder(originalConfig).setNumTopFeatureImportanceValues(5).build(), - equalTo(new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(5).build().apply(originalConfig)) + equalTo(originalConfig.apply(new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(5).build())) ); assertThat( new RegressionConfig.Builder().setNumTopFeatureImportanceValues(1).setResultsField("foo").build(), equalTo( - new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(1) - .setResultsField("foo") - .build() - .apply(originalConfig) + originalConfig.apply( + new 
RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(1).setResultsField("foo").build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java index 4237458d01f63..9accabb788669 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java @@ -44,7 +44,7 @@ public void testApply_OnlyTheResultsFieldIsChanged() { ClassificationConfig config = ClassificationConfigTests.randomClassificationConfig(); String newResultsField = config.getResultsField() + "foobar"; ResultsFieldUpdate update = new ResultsFieldUpdate(newResultsField); - InferenceConfig applied = update.apply(config); + InferenceConfig applied = config.apply(update); assertThat(applied, instanceOf(ClassificationConfig.class)); ClassificationConfig appliedConfig = (ClassificationConfig) applied; @@ -55,7 +55,7 @@ public void testApply_OnlyTheResultsFieldIsChanged() { RegressionConfig config = RegressionConfigTests.randomRegressionConfig(); String newResultsField = config.getResultsField() + "foobar"; ResultsFieldUpdate update = new ResultsFieldUpdate(newResultsField); - InferenceConfig applied = update.apply(config); + InferenceConfig applied = config.apply(update); assertThat(applied, instanceOf(RegressionConfig.class)); RegressionConfig appliedConfig = (RegressionConfig) applied; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java index 4f2c167015816..8cedd20432a6e 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java @@ -63,6 +63,13 @@ protected RobertaTokenization mutateInstanceForVersion(RobertaTokenization insta return mutateForVersion(instance, version); } + public void testsBuildUpdatedTokenization() { + var update = new RobertaTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20); + assertEquals(Tokenization.Truncate.NONE, update.getTruncate()); + assertEquals(50, update.maxSequenceLength()); + assertEquals(20, update.getSpan()); + } + public static RobertaTokenization createRandom() { return new RobertaTokenization( randomBoolean() ? null : randomBoolean(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java index 72d963da8f0be..25b4299b41e8e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java @@ -79,38 +79,6 @@ TextClassificationConfigUpdate fromMap(Map map) { return TextClassificationConfigUpdate.fromMap(map); } - public void testIsNoop() { - assertTrue(new TextClassificationConfigUpdate.Builder().build().isNoop(TextClassificationConfigTests.createRandom())); - - assertFalse( - new TextClassificationConfigUpdate.Builder().setResultsField("foo") - .build() - .isNoop( - new TextClassificationConfig.Builder().setClassificationLabels(List.of("a", "b")) - .setNumTopClasses(-1) - .setResultsField("bar") - .build() - ) - ); - - assertTrue( - new 
TextClassificationConfigUpdate.Builder().setNumTopClasses(3) - .build() - .isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("a", "b")).setNumTopClasses(3).build()) - ); - assertFalse( - new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("a", "b")) - .build() - .isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("c", "d")).build()) - ); - assertFalse( - new TextClassificationConfigUpdate.Builder().setTokenizationUpdate( - new BertTokenizationUpdate(Tokenization.Truncate.SECOND, null) - ).build().isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("c", "d")).build()) - ); - - } - public void testApply() { TextClassificationConfig originalConfig = new TextClassificationConfig( VocabularyConfigTests.createRandom(), @@ -120,24 +88,24 @@ public void testApply() { "foo-results" ); - assertThat(originalConfig, equalTo(new TextClassificationConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new TextClassificationConfigUpdate.Builder().build()))); assertThat( new TextClassificationConfig.Builder(originalConfig).setClassificationLabels(List.of("foo", "bar")).build(), equalTo( - new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("foo", "bar")).build().apply(originalConfig) + originalConfig.apply(new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("foo", "bar")).build()) ) ); assertThat( new TextClassificationConfig.Builder(originalConfig).setResultsField("ml-results").build(), - equalTo(new TextClassificationConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new TextClassificationConfigUpdate.Builder().setResultsField("ml-results").build())) ); assertThat( new TextClassificationConfig.Builder(originalConfig).setNumTopClasses(originalConfig.getNumTopClasses() + 2).build(), equalTo( - new 
TextClassificationConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 2) - .build() - .apply(originalConfig) + originalConfig.apply( + new TextClassificationConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 2).build() + ) ) ); @@ -146,9 +114,11 @@ public void testApply() { assertThat( new TextClassificationConfig.Builder(originalConfig).setTokenization(tokenization).build(), equalTo( - new TextClassificationConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new TextClassificationConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } @@ -161,7 +131,7 @@ public void testApplyWithInvalidLabels() { var update = new TextClassificationConfigUpdate.Builder().setClassificationLabels(newLabels).build(); - ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> update.apply(originalConfig)); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> originalConfig.apply(update)); assertThat( e.getMessage(), containsString( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java index 06abb12bdb0a2..ecff9c1010c46 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java @@ -20,7 +20,6 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation; import static 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; public class TextEmbeddingConfigUpdateTests extends AbstractNlpConfigUpdateTestCase { @@ -61,7 +60,7 @@ TextEmbeddingConfigUpdate fromMap(Map map) { public void testApply() { TextEmbeddingConfig originalConfig = TextEmbeddingConfigTests.createRandom(); - assertThat(originalConfig, sameInstance(new TextEmbeddingConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new TextEmbeddingConfigUpdate.Builder().build()))); assertThat( new TextEmbeddingConfig( @@ -70,7 +69,7 @@ public void testApply() { "ml-results", originalConfig.getEmbeddingSize() ), - equalTo(new TextEmbeddingConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new TextEmbeddingConfigUpdate.Builder().setResultsField("ml-results").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -83,9 +82,11 @@ public void testApply() { originalConfig.getEmbeddingSize() ), equalTo( - new TextEmbeddingConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new TextEmbeddingConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java index b8b5f50d15bae..e5061a743c672 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java @@ -127,7 +127,7 @@ public void testApply() { originalConfig.getResultsField(), originalConfig.getSpanScoreFunction() ), - equalTo(new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").build().apply(originalConfig)) + equalTo(originalConfig.apply(new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").build())) ); assertThat( new TextSimilarityConfig( @@ -138,10 +138,9 @@ public void testApply() { originalConfig.getSpanScoreFunction() ), equalTo( - new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?") - .setResultsField("updated-field") - .build() - .apply(originalConfig) + originalConfig.apply( + new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").setResultsField("updated-field").build() + ) ) ); @@ -156,10 +155,11 @@ public void testApply() { originalConfig.getSpanScoreFunction() ), equalTo( - new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?") - .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) - .build() - .apply(originalConfig) + originalConfig.apply( + new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?") + .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) + .build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java new file mode 100644 index 0000000000000..431dcf6c8c769 --- /dev/null +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.inference.trainedmodel; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; + +public class TokenizationConfigUpdateTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return TokenizationConfigUpdate::new; + } + + @Override + protected TokenizationConfigUpdate createTestInstance() { + Integer maxSequenceLength = randomBoolean() ? null : randomIntBetween(32, 64); + int span = randomIntBetween(8, 16); + return new TokenizationConfigUpdate(new Tokenization.SpanSettings(maxSequenceLength, span)); + } + + @Override + protected TokenizationConfigUpdate mutateInstance(TokenizationConfigUpdate instance) throws IOException { + return null; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java index 09c8eed048d96..ed034bb8518d9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java @@ -108,7 +108,7 @@ public void testApply() { randomBoolean() ? 
null : randomAlphaOfLength(8) ); - assertThat(originalConfig, equalTo(new ZeroShotClassificationConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().build()))); assertThat( new ZeroShotClassificationConfig( @@ -120,7 +120,7 @@ public void testApply() { List.of("foo", "bar"), originalConfig.getResultsField() ), - equalTo(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("foo", "bar")).build().apply(originalConfig)) + equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("foo", "bar")).build())) ); assertThat( new ZeroShotClassificationConfig( @@ -132,7 +132,7 @@ public void testApply() { originalConfig.getLabels().orElse(null), originalConfig.getResultsField() ), - equalTo(new ZeroShotClassificationConfigUpdate.Builder().setMultiLabel(true).build().apply(originalConfig)) + equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setMultiLabel(true).build())) ); assertThat( new ZeroShotClassificationConfig( @@ -144,7 +144,7 @@ public void testApply() { originalConfig.getLabels().orElse(null), "updated-field" ), - equalTo(new ZeroShotClassificationConfigUpdate.Builder().setResultsField("updated-field").build().apply(originalConfig)) + equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setResultsField("updated-field").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -160,9 +160,11 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new ZeroShotClassificationConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new ZeroShotClassificationConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + 
).build() + ) ) ); } @@ -178,41 +180,13 @@ public void testApplyWithEmptyLabelsInConfigAndUpdate() { null ); - Exception ex = expectThrows(Exception.class, () -> new ZeroShotClassificationConfigUpdate.Builder().build().apply(originalConfig)); + Exception ex = expectThrows(Exception.class, () -> originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().build())); assertThat( ex.getMessage(), containsString("stored configuration has no [labels] defined, supplied inference_config update must supply [labels]") ); } - public void testIsNoop() { - assertTrue(new ZeroShotClassificationConfigUpdate.Builder().build().isNoop(ZeroShotClassificationConfigTests.createRandom())); - - var originalConfig = new ZeroShotClassificationConfig( - List.of("contradiction", "neutral", "entailment"), - randomBoolean() ? null : VocabularyConfigTests.createRandom(), - randomBoolean() ? null : BertTokenizationTests.createRandom(), - randomAlphaOfLength(10), - randomBoolean(), - null, - randomBoolean() ? null : randomAlphaOfLength(8) - ); - - var update = new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("glad", "sad", "mad")).build(); - assertFalse(update.isNoop(originalConfig)); - - originalConfig = new ZeroShotClassificationConfig( - List.of("contradiction", "neutral", "entailment"), - randomBoolean() ? null : VocabularyConfigTests.createRandom(), - randomBoolean() ? null : BertTokenizationTests.createRandom(), - randomAlphaOfLength(10), - randomBoolean(), - List.of("glad", "sad", "mad"), - randomBoolean() ? 
null : randomAlphaOfLength(8) - ); - assertTrue(update.isNoop(originalConfig)); - } - public static ZeroShotClassificationConfigUpdate createRandom() { return randomUpdate(); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeySerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeySerializationTests.java index d2a02cd053ca1..8e6cdb2042811 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeySerializationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeySerializationTests.java @@ -25,7 +25,7 @@ public class ApiKeySerializationTests extends AbstractWireSerializingTestCase { public void testSerializationBackwardsCompatibility() throws IOException { BulkUpdateApiKeyRequest testInstance = createTestInstance(); - BulkUpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_500_064); + BulkUpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); try { // Transport is on a version before expiration was introduced, so should always be null assertThat(deserializedInstance.getExpiration(), nullValue()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequestTests.java index 6c7df3d4db80c..d1f6e4da3045e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequestTests.java @@ -7,88 +7,13 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import 
org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.searchafter.SearchAfterBuilder; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.util.List; - import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; public class QueryApiKeyRequestTests extends ESTestCase { - @Override - protected NamedWriteableRegistry writableRegistry() { - final SearchModule searchModule = new SearchModule(Settings.EMPTY, List.of()); - return new NamedWriteableRegistry(searchModule.getNamedWriteables()); - } - - public void testReadWrite() throws IOException { - final QueryApiKeyRequest request1 = new QueryApiKeyRequest(); - try (BytesStreamOutput out = new BytesStreamOutput()) { - request1.writeTo(out); - try (StreamInput in = new InputStreamStreamInput(new ByteArrayInputStream(out.bytes().array()))) { - assertThat(new QueryApiKeyRequest(in).getQueryBuilder(), nullValue()); - } - } - - final BoolQueryBuilder boolQueryBuilder2 = QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery("foo", "bar")) - .should(QueryBuilders.idsQuery().addIds("id1", "id2")) - .must(QueryBuilders.wildcardQuery("a.b", "t*y")) - .mustNot(QueryBuilders.prefixQuery("value", "prod")); - final QueryApiKeyRequest 
request2 = new QueryApiKeyRequest(boolQueryBuilder2); - try (BytesStreamOutput out = new BytesStreamOutput()) { - request2.writeTo(out); - try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry())) { - final QueryApiKeyRequest deserialized = new QueryApiKeyRequest(in); - assertThat(deserialized.getQueryBuilder().getClass(), is(BoolQueryBuilder.class)); - assertThat((BoolQueryBuilder) deserialized.getQueryBuilder(), equalTo(boolQueryBuilder2)); - } - } - - final QueryApiKeyRequest request3 = new QueryApiKeyRequest( - QueryBuilders.matchAllQuery(), - 42, - 20, - List.of( - new FieldSortBuilder("name"), - new FieldSortBuilder("creation_time").setFormat("strict_date_time").order(SortOrder.DESC), - new FieldSortBuilder("username") - ), - new SearchAfterBuilder().setSortValues(new String[] { "key-2048", "2021-07-01T00:00:59.000Z" }), - randomBoolean() - ); - try (BytesStreamOutput out = new BytesStreamOutput()) { - request3.writeTo(out); - try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry())) { - final QueryApiKeyRequest deserialized = new QueryApiKeyRequest(in); - assertThat(deserialized.getQueryBuilder().getClass(), is(MatchAllQueryBuilder.class)); - assertThat(deserialized.getFrom(), equalTo(request3.getFrom())); - assertThat(deserialized.getSize(), equalTo(request3.getSize())); - assertThat(deserialized.getFieldSortBuilders(), equalTo(request3.getFieldSortBuilders())); - assertThat(deserialized.getSearchAfterBuilder(), equalTo(request3.getSearchAfterBuilder())); - assertThat(deserialized.withLimitedBy(), equalTo(request3.withLimitedBy())); - } - } - } - public void testValidate() { final QueryApiKeyRequest request1 = new QueryApiKeyRequest( null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponseTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponseTests.java deleted file mode 100644 index 677d2201fe1e1..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponseTests.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; - -import java.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; - -public class QueryApiKeyResponseTests extends AbstractWireSerializingTestCase { - - @Override - protected Writeable.Reader instanceReader() { - return QueryApiKeyResponse::new; - } - - @Override - protected QueryApiKeyResponse createTestInstance() { - final List items = randomList(0, 3, this::randomItem); - return new QueryApiKeyResponse(randomIntBetween(items.size(), 100), items); - } - - @Override - protected QueryApiKeyResponse mutateInstance(QueryApiKeyResponse instance) { - final List 
items = Arrays.stream(instance.getItems()).collect(Collectors.toCollection(ArrayList::new)); - switch (randomIntBetween(0, 3)) { - case 0: - items.add(randomItem()); - return new QueryApiKeyResponse(instance.getTotal(), items); - case 1: - if (false == items.isEmpty()) { - return new QueryApiKeyResponse(instance.getTotal(), items.subList(1, items.size())); - } else { - items.add(randomItem()); - return new QueryApiKeyResponse(instance.getTotal(), items); - } - case 2: - if (false == items.isEmpty()) { - final int index = randomIntBetween(0, items.size() - 1); - items.set(index, randomItem()); - } else { - items.add(randomItem()); - } - return new QueryApiKeyResponse(instance.getTotal(), items); - default: - return new QueryApiKeyResponse(instance.getTotal() + 1, items); - } - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry( - List.of( - new NamedWriteableRegistry.Entry( - ConfigurableClusterPrivilege.class, - ConfigurableClusterPrivileges.ManageApplicationPrivileges.WRITEABLE_NAME, - ConfigurableClusterPrivileges.ManageApplicationPrivileges::createFrom - ), - new NamedWriteableRegistry.Entry( - ConfigurableClusterPrivilege.class, - ConfigurableClusterPrivileges.WriteProfileDataPrivileges.WRITEABLE_NAME, - ConfigurableClusterPrivileges.WriteProfileDataPrivileges::createFrom - ) - ) - ); - } - - private QueryApiKeyResponse.Item randomItem() { - return new QueryApiKeyResponse.Item(randomApiKeyInfo(), randomSortValues()); - } - - private ApiKey randomApiKeyInfo() { - final String name = randomAlphaOfLengthBetween(3, 8); - final String id = randomAlphaOfLength(22); - final ApiKey.Type type = randomFrom(ApiKey.Type.values()); - final String username = randomAlphaOfLengthBetween(3, 8); - final String realm_name = randomAlphaOfLengthBetween(3, 8); - final Instant creation = Instant.ofEpochMilli(randomMillisUpToYear9999()); - final Instant expiration = randomBoolean() ? 
Instant.ofEpochMilli(randomMillisUpToYear9999()) : null; - final Map metadata = ApiKeyTests.randomMetadata(); - final List roleDescriptors = type == ApiKey.Type.CROSS_CLUSTER - ? List.of(randomCrossClusterAccessRoleDescriptor()) - : randomFrom(randomUniquelyNamedRoleDescriptors(0, 3), null); - return new ApiKey( - name, - id, - type, - creation, - expiration, - false, - null, - username, - realm_name, - metadata, - roleDescriptors, - type == ApiKey.Type.CROSS_CLUSTER ? null : randomUniquelyNamedRoleDescriptors(1, 3) - ); - } - - private Object[] randomSortValues() { - if (randomBoolean()) { - return null; - } else { - return randomArray(1, 3, Object[]::new, () -> randomFrom(42, 42L, "key-1", "2021-01-01T00:00:00.177Z", randomBoolean())); - } - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java index be1e69d4d30e8..83d74b7e9d413 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java @@ -23,7 +23,7 @@ public class UpdateApiKeyRequestSerializationTests extends AbstractWireSerializingTestCase { public void testSerializationBackwardsCompatibility() throws IOException { UpdateApiKeyRequest testInstance = createTestInstance(); - UpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_500_064); + UpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); try { // Transport is on a version before expiration was introduced, so should always be null assertThat(deserializedInstance.getExpiration(), nullValue()); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java new file mode 100644 index 0000000000000..e7d8ef0b65e39 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.core.security.action.user; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.nullValue; + +public class QueryUserRequestTests extends ESTestCase { + public void testValidate() { + final QueryUserRequest request1 = new QueryUserRequest( + null, + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + null, + null + ); + assertThat(request1.validate(), nullValue()); + + final QueryUserRequest request2 = new QueryUserRequest( + null, + randomIntBetween(Integer.MIN_VALUE, -1), + randomIntBetween(0, Integer.MAX_VALUE), + null, + null + ); + assertThat(request2.validate().getMessage(), containsString("[from] parameter cannot be negative")); + + final QueryUserRequest request3 = new QueryUserRequest( + null, + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(Integer.MIN_VALUE, -1), + null, + null + ); + assertThat(request3.validate().getMessage(), containsString("[size] parameter cannot be negative")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 
bddc30b8d7b83..21827c4b9a373 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; import org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyAction; @@ -281,6 +282,7 @@ public void testReadSecurityPrivilege() { GetServiceAccountAction.NAME, GetServiceAccountCredentialsAction.NAME, GetUsersAction.NAME, + ActionTypes.QUERY_USER_ACTION.name(), HasPrivilegesAction.NAME, GetUserPrivilegesAction.NAME, GetSecuritySettingsAction.NAME @@ -339,16 +341,11 @@ public void testManageUserProfilePrivilege() { "cluster:admin/xpack/security/role/get", "cluster:admin/xpack/security/role/delete" ); - verifyClusterActionDenied( - ClusterPrivilegeResolver.MANAGE_USER_PROFILE, - "cluster:admin/xpack/security/role/put", - "cluster:admin/xpack/security/role/get", - "cluster:admin/xpack/security/role/delete" - ); verifyClusterActionDenied( ClusterPrivilegeResolver.MANAGE_USER_PROFILE, "cluster:admin/xpack/security/user/put", "cluster:admin/xpack/security/user/get", + "cluster:admin/xpack/security/user/query", "cluster:admin/xpack/security/user/delete" ); verifyClusterActionDenied( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java index 
d4500d9439329..b0ad137f0f1b6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.RestController; @@ -38,7 +37,6 @@ import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static java.util.Collections.emptyMap; @@ -77,14 +75,7 @@ protected void doExecute(Task task, ActionRequest request, ActionListener(); actions.put(TermsEnumAction.INSTANCE, transportAction); - client.initialize( - actions, - taskManager, - () -> "local", - mock(Transport.Connection.class), - null, - new NamedWriteableRegistry(List.of()) - ); + client.initialize(actions, taskManager, () -> "local", mock(Transport.Connection.class), null); controller.registerHandler(action); } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/connector-secrets.json b/x-pack/plugin/core/template-resources/src/main/resources/connector-secrets.json new file mode 100644 index 0000000000000..96fa641726fa3 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/connector-secrets.json @@ -0,0 +1,26 @@ +{ + "settings": { + "index": { + "auto_expand_replicas": "0-1", + "number_of_shards": 1, + "number_of_replicas": 0, + "priority": 100, + "refresh_interval": "1s" + } + }, + "mappings": { + "_doc" : { + "dynamic": false, + "_meta": { + "version": "${connector-secrets.version}", + "managed_index_mappings_version": ${connector-secrets.managed.index.version} + }, 
+ "properties": { + "value": { + "type": "keyword", + "index": false + } + } + } + } +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json index b3d6dc3936d59..233c170890d40 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json @@ -101,6 +101,13 @@ "type": "long" } } + }, + "total_data_set_size": { + "properties": { + "bytes": { + "type": "long" + } + } } } } @@ -623,6 +630,13 @@ "type": "long" } } + }, + "total_data_set_size": { + "properties": { + "bytes": { + "type": "long" + } + } } } }, @@ -1253,6 +1267,9 @@ "properties": { "size_in_bytes": { "type": "long" + }, + "total_data_set_size_in_bytes": { + "type": "long" } } }, @@ -1410,6 +1427,9 @@ }, "size_in_bytes": { "type": "long" + }, + "total_data_set_size_in_bytes": { + "type": "long" } } }, @@ -1704,6 +1724,13 @@ "type": "long" } } + }, + "total_data_set_size": { + "properties": { + "bytes": { + "type": "long" + } + } } } }, @@ -1828,6 +1855,13 @@ "type": "long" } } + }, + "total_data_set_size": { + "properties": { + "bytes": { + "type": "long" + } + } } } }, diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle index 99221e47ae592..ab09c31d6f80c 100644 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle @@ -12,7 +12,6 @@ esplugin { dependencies { javaRestTestImplementation project(path: ':x-pack:plugin:deprecation:qa:common') - javaRestTestImplementation project(':client:rest-high-level') javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") } diff --git 
a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle index 415b571ec182a..49138c6b5dce0 100644 --- a/x-pack/plugin/deprecation/qa/rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/rest/build.gradle @@ -11,7 +11,6 @@ esplugin { dependencies { javaRestTestImplementation project(path: ':x-pack:plugin:deprecation:qa:common') - javaRestTestImplementation project(':client:rest-high-level') javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") } diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java index 2e58bb8f673b5..6d95038e2cbcc 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java @@ -26,7 +26,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -@SuppressWarnings("removal") public class MlDeprecationIT extends ESRestTestCase { private static final RequestOptions REQUEST_OPTIONS = RequestOptions.DEFAULT.toBuilder() diff --git a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java index 01c449645fa99..8080761983136 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java +++ b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java @@ -8,6 +8,7 @@ import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -34,6 +35,7 @@ public class TestDeprecationPlugin extends Plugin implements ActionPlugin, Searc @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java index dd060653a4f34..329370929ec53 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.RateLimitingFilter; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -63,6 +64,7 @@ public class Deprecation extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java 
b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 0351db53b6c69..13ef198863284 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -52,7 +52,7 @@ public class DeprecationInfoAction extends ActionType implements ToXContentObject { diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/30_date_histogram.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/30_date_histogram.yml index b7f3ec7b8f384..831ad158deda4 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/30_date_histogram.yml +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/30_date_histogram.yml @@ -13,8 +13,8 @@ setup: mode: time_series routing_path: [ uid ] time_series: - start_time: 2021-04-28T00:00:00Z - end_time: 2021-04-29T00:00:00Z + start_time: 2020-01-01T00:00:00Z + end_time: 2022-01-01T00:00:00Z mappings: properties: "@timestamp": @@ -39,12 +39,6 @@ setup: - '{ "index": {} }' - '{ "@timestamp": "2021-04-28T18:55:00Z", "uid": "004", "total_memory_used": 120770 }' - - do: - indices.put_settings: - index: test - body: - index.blocks.write: true - --- "Date histogram aggregation on time series index and rollup indices": - skip: @@ -52,6 +46,12 @@ setup: reason: "rollup: unsupported aggregations errors added in 8.5.0" features: close_to + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + - do: indices.downsample: index: test @@ -142,24 +142,6 @@ setup: - match: { aggregations.date_histogram.buckets.0.key_as_string: "2021-04-28T18:00:00.000Z"} - match: { aggregations.date_histogram.buckets.0.key: 1619632800000 } - # date histogram aggregation with non-utc 
timezone on rollup index not supported - - do: - catch: bad_request - search: - index: test-downsample - body: - size: 0 - aggs: - date_histogram: - date_histogram: - field: "@timestamp" - fixed_interval: 1h - time_zone: "America/New_York" - - - match: { status: 400 } - - match: { error.root_cause.0.type: illegal_argument_exception } - - match: { error.root_cause.0.reason: "Field [@timestamp] of type [date] is not supported for aggregation [date_histogram] with timezone [America/New_York]" } - # date histogram aggregation with non-utc timezone on time series index supported - do: search: @@ -247,3 +229,485 @@ setup: - match: { _shards.failures.0.index: "test-downsample" } - match: { _shards.failures.0.reason.type: illegal_argument_exception } - match: { _shards.failures.0.reason.reason: "Field [@timestamp] of type [date] is not supported for aggregation [date_histogram] with interval type [calendar_interval]" } + +--- +timezone support - 15m: + - skip: + version: " - 8.12.99" + reason: "timezone support added in 8.13" + + - do: + bulk: + refresh: true + index: test + body: + # Check timezone support + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T10:05:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T10:55:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T11:05:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T11:55:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T12:05:00Z", "uid": "001", "total_memory_used": 120770 }' + # Check daylight savings + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T03:00:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T03:50:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T04:00:00Z", 
"uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T04:50:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T05:00:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T06:00:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T07:50:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T08:00:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T08:50:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T09:00:00Z", "uid": "001", "total_memory_used": 109009 }' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "15m" + } + + - is_true: acknowledged + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "America/New_York" + query: + range: + "@timestamp": + gt: "2021-04-28T15:00:00Z" + lt: "2021-04-29T15:00:00Z" + + - match: { hits.total.value: 4 } + - length: { aggregations.date_histogram.buckets: 1 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 4 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2021-04-28T14:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.0.key: 1619632800000 } + - is_false: aggregations.date_histogram.downsampled_results_offset + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "-01:15" + query: + range: + "@timestamp": + gt: "2021-04-27T15:00:00Z" + lt: "2021-04-28T15:00:00Z" 
+ + - match: { hits.total.value: 5 } + - length: { aggregations.date_histogram.buckets: 3 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2021-04-28T08:00:00.000-01:15" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2021-04-28T09:00:00.000-01:15" } + - match: { aggregations.date_histogram.buckets.2.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.2.key_as_string: "2021-04-28T10:00:00.000-01:15" } + - is_false: aggregations.date_histogram.downsampled_results_offset + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "+02:15" + query: + range: + "@timestamp": + gt: "2021-04-27T15:00:00Z" + lt: "2021-04-28T15:00:00Z" + + - match: { hits.total.value: 5 } + - length: { aggregations.date_histogram.buckets: 3 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2021-04-28T12:00:00.000+02:15" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2021-04-28T13:00:00.000+02:15" } + - match: { aggregations.date_histogram.buckets.2.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.2.key_as_string: "2021-04-28T14:00:00.000+02:15" } + - is_false: aggregations.date_histogram.downsampled_results_offset + + # Check timezone with daylight savings + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "America/New_York" + query: + range: + "@timestamp": + gt: "2020-03-08T00:00:00Z" + lt: "2020-03-10T00:00:00Z" + + - match: { hits.total.value: 10 } + - length: { aggregations.date_histogram.buckets: 7 } + - 
match: { aggregations.date_histogram.buckets.0.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2020-03-08T23:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2020-03-09T00:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.2.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.2.key_as_string: "2020-03-09T01:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.3.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.3.key_as_string: "2020-03-09T02:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.4.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.4.key_as_string: "2020-03-09T03:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.5.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.5.key_as_string: "2020-03-09T04:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.6.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.6.key_as_string: "2020-03-09T05:00:00.000-04:00" } + - is_false: aggregations.date_histogram.downsampled_results_offset + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1d + time_zone: "America/New_York" + query: + range: + "@timestamp": + gt: "2020-03-08T00:00:00Z" + lt: "2020-03-10T00:00:00Z" + + - match: { hits.total.value: 10 } + - length: { aggregations.date_histogram.buckets: 2 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2020-03-08T00:00:00.000-05:00" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 8 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2020-03-09T00:00:00.000-04:00" } + - is_false: aggregations.date_histogram.downsampled_results_offset + 
+--- +timezone support - 1h: + - skip: + version: " - 8.12.99" + reason: "timezone support added in 8.13" + + - do: + bulk: + refresh: true + index: test + body: + # Check timezone support + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T10:05:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T10:55:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T11:05:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T11:55:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2021-04-28T12:05:00Z", "uid": "001", "total_memory_used": 120770 }' + # Check daylight savings + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T03:00:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T03:50:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T04:00:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T04:50:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T05:00:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T06:00:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T07:50:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T08:00:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T08:50:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T09:00:00Z", "uid": "001", "total_memory_used": 109009 }' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + + - do: + 
indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + + - is_true: acknowledged + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "America/New_York" + query: + range: + "@timestamp": + gt: "2021-04-28T15:00:00Z" + lt: "2021-04-29T15:00:00Z" + + - match: { hits.total.value: 4 } + - length: { aggregations.date_histogram.buckets: 1 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 4 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2021-04-28T14:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.0.key: 1619632800000 } + - is_false: aggregations.date_histogram.downsampled_results_offset + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "-01:15" + query: + range: + "@timestamp": + gt: "2021-04-27T15:00:00Z" + lt: "2021-04-28T15:00:00Z" + + - match: { hits.total.value: 3 } + - match: { aggregations.date_histogram.downsampled_results_offset: true } + - length: { aggregations.date_histogram.buckets: 3 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2021-04-28T08:45:00.000-01:15" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2021-04-28T09:45:00.000-01:15" } + - match: { aggregations.date_histogram.buckets.2.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.2.key_as_string: "2021-04-28T10:45:00.000-01:15" } + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "+02:15" + query: + range: + "@timestamp": + gt: "2021-04-27T15:00:00Z" + lt: 
"2021-04-28T15:00:00Z" + + - match: { hits.total.value: 3 } + - match: { aggregations.date_histogram.downsampled_results_offset: true } + - length: { aggregations.date_histogram.buckets: 3 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2021-04-28T12:15:00.000+02:15" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2021-04-28T13:15:00.000+02:15" } + - match: { aggregations.date_histogram.buckets.2.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.2.key_as_string: "2021-04-28T14:15:00.000+02:15" } + + # Check timezone with daylight savings + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1h + time_zone: "America/New_York" + query: + range: + "@timestamp": + gt: "2020-03-08T00:00:00Z" + lt: "2020-03-10T00:00:00Z" + + - match: { hits.total.value: 7 } + - length: { aggregations.date_histogram.buckets: 7 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2020-03-08T23:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2020-03-09T00:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.2.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.2.key_as_string: "2020-03-09T01:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.3.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.3.key_as_string: "2020-03-09T02:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.4.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.4.key_as_string: "2020-03-09T03:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.5.doc_count: 2 } + - 
match: { aggregations.date_histogram.buckets.5.key_as_string: "2020-03-09T04:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.6.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.6.key_as_string: "2020-03-09T05:00:00.000-04:00" } + - is_false: aggregations.date_histogram.downsampled_results_offset + + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1d + time_zone: "America/New_York" + query: + range: + "@timestamp": + gt: "2020-03-08T00:00:00Z" + lt: "2020-03-10T00:00:00Z" + + - match: { hits.total.value: 7 } + - length: { aggregations.date_histogram.buckets: 2 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2020-03-08T00:00:00.000-05:00" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 8 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2020-03-09T00:00:00.000-04:00" } + - is_false: aggregations.date_histogram.downsampled_results_offset + +--- +timezone support - 1d: + - skip: + version: " - 8.12.99" + reason: "timezone support added in 8.13" + + - do: + bulk: + refresh: true + index: test + body: + # Check daylight savings + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-08T03:00:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-08T03:50:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T03:00:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T03:50:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T04:00:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T04:50:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ 
"@timestamp": "2020-03-09T05:00:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T06:00:00Z", "uid": "001", "total_memory_used": 106780 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T07:50:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T08:00:00Z", "uid": "001", "total_memory_used": 110450 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-09T08:50:00Z", "uid": "001", "total_memory_used": 109009 }' + - '{ "index": {} }' + - '{ "@timestamp": "2020-03-10T09:00:00Z", "uid": "001", "total_memory_used": 109009 }' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1d" + } + + - is_true: acknowledged + + # Check timezone with daylight savings + - do: + search: + index: test-downsample + body: + size: 0 + aggs: + date_histogram: + date_histogram: + field: "@timestamp" + fixed_interval: 1d + time_zone: "America/New_York" + query: + range: + "@timestamp": + gt: "2020-03-01T00:00:00Z" + lt: "2020-03-30T00:00:00Z" + + - match: { hits.total.value: 3 } + - match: { aggregations.date_histogram.downsampled_results_offset: true } + - length: { aggregations.date_histogram.buckets: 3 } + - match: { aggregations.date_histogram.buckets.0.doc_count: 2 } + - match: { aggregations.date_histogram.buckets.0.key_as_string: "2020-03-07T19:00:00.000-05:00" } + - match: { aggregations.date_histogram.buckets.1.doc_count: 9 } + - match: { aggregations.date_histogram.buckets.1.key_as_string: "2020-03-08T19:00:00.000-04:00" } + - match: { aggregations.date_histogram.buckets.2.doc_count: 1 } + - match: { aggregations.date_histogram.buckets.2.key_as_string: "2020-03-09T19:00:00.000-04:00" } diff --git 
a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index f248da8a7842a..e0d1fa45a80c3 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -139,6 +139,7 @@ public boolean validateClusterForming() { Settings indexSettings = getSettingsResponse.getIndexToSettings().get(targetIndex); assertThat(indexSettings, is(notNullValue())); assertThat(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.get(indexSettings), is(IndexMetadata.DownsampleTaskStatus.SUCCESS)); + assertEquals("5m", IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.get(indexSettings)); } catch (Exception e) { throw new AssertionError(e); } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java index 84dfb4169eb92..260782a3eb0f3 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java @@ -75,6 +75,7 @@ public List> getExecutorBuilders(Settings settings) { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java 
b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java index ebf31bd32b48f..f500ce986f6dd 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java @@ -241,7 +241,7 @@ public static class DelegatingAction extends ActionType { public static final String NAME = "indices:data/read/downsample_delegate"; private DelegatingAction() { - super(NAME, in -> new ActionResponse.Empty()); + super(NAME); } public static class Request extends ActionRequest implements IndicesRequest { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java index 34b7d3c90b267..813dcc8c8d5a4 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java @@ -91,7 +91,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_061; + return TransportVersions.V_8_10_X; } @Override diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index e7bd2f0c0fb27..f3bb43b9a3f38 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -17,12 +17,12 @@ import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.client.internal.Client; @@ -115,6 +115,7 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc private final IndexScopedSettings indexScopedSettings; private final ThreadContext threadContext; private final PersistentTasksService persistentTasksService; + private String downsamplingInterval; private static final Set FORBIDDEN_SETTINGS = Set.of( IndexSettings.DEFAULT_PIPELINE.getKey(), @@ -284,6 +285,7 @@ protected void masterOperation( // Validate downsampling interval validateDownsamplingInterval(mapperService, request.getDownsampleConfig()); + downsamplingInterval = request.getDownsampleConfig().getInterval().toString(); final List dimensionFields = new ArrayList<>(); final List metricFields = new ArrayList<>(); @@ -846,7 +848,7 @@ public void onFailure(Exception e) { /** * Updates the downsample target index metadata (task status) */ - class RefreshDownsampleIndexActionListener implements ActionListener { + class RefreshDownsampleIndexActionListener implements ActionListener { private final ActionListener actionListener; private final TaskId parentTask; @@ -866,7 +868,7 @@ class 
RefreshDownsampleIndexActionListener implements ActionListener labelFields ) { final AggregationBuilder aggregations = buildAggregations(config, metricFields, labelFields, config.getTimestampField()); - Aggregations origResp = aggregate(sourceIndex, aggregations); - Aggregations downsampleResp = aggregate(downsampleIndex, aggregations); + InternalAggregations origResp = aggregate(sourceIndex, aggregations); + InternalAggregations downsampleResp = aggregate(downsampleIndex, aggregations); assertEquals(origResp.asMap().keySet(), downsampleResp.asMap().keySet()); StringTerms originalTsIdTermsAggregation = (StringTerms) origResp.getAsMap().values().stream().toList().get(0); @@ -1164,25 +1164,25 @@ private void assertDownsampleIndexAggregations( InternalDateHistogram.Bucket downsampleDateHistogramBucket = downsampleDateHistogramBuckets.get(i); assertEquals(originalDateHistogramBucket.getKeyAsString(), downsampleDateHistogramBucket.getKeyAsString()); - Aggregations originalAggregations = originalDateHistogramBucket.getAggregations(); - Aggregations downsampleAggregations = downsampleDateHistogramBucket.getAggregations(); + InternalAggregations originalAggregations = originalDateHistogramBucket.getAggregations(); + InternalAggregations downsampleAggregations = downsampleDateHistogramBucket.getAggregations(); assertEquals(originalAggregations.asList().size(), downsampleAggregations.asList().size()); - List nonTopHitsOriginalAggregations = originalAggregations.asList() + List nonTopHitsOriginalAggregations = originalAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits") == false) .toList(); - List nonTopHitsDownsampleAggregations = downsampleAggregations.asList() + List nonTopHitsDownsampleAggregations = downsampleAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits") == false) .toList(); assertEquals(nonTopHitsOriginalAggregations, nonTopHitsDownsampleAggregations); - List topHitsOriginalAggregations = 
originalAggregations.asList() + List topHitsOriginalAggregations = originalAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits")) .toList(); - List topHitsDownsampleAggregations = downsampleAggregations.asList() + List topHitsDownsampleAggregations = downsampleAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits")) .toList(); @@ -1224,7 +1224,7 @@ private void assertDownsampleIndexAggregations( ); Object originalLabelValue = originalHit.getDocumentFields().values().stream().toList().get(0).getValue(); Object downsampleLabelValue = downsampleHit.getDocumentFields().values().stream().toList().get(0).getValue(); - Optional labelAsMetric = nonTopHitsOriginalAggregations.stream() + Optional labelAsMetric = nonTopHitsOriginalAggregations.stream() .filter(agg -> agg.getName().equals("metric_" + downsampleTopHits.getName())) .findFirst(); // NOTE: this check is possible only if the label can be indexed as a metric (the label is a numeric field) diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java index 251c778260188..0a754cbb542b2 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; @@ -24,6 +23,7 @@ import 
org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; @@ -240,7 +240,7 @@ private void indexDocs(final String dataStream, int numDocs, long startTime) { final BulkItemResponse[] items = bulkResponse.getItems(); assertThat(items.length, equalTo(numDocs)); assertThat(bulkResponse.hasFailures(), equalTo(false)); - final RefreshResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest(dataStream)).actionGet(); + final BroadcastResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest(dataStream)).actionGet(); assertThat(refreshResponse.getStatus().getStatus(), equalTo(RestStatus.OK.getStatus())); } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java index 8e0c96c6ee245..e9a075227107c 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java @@ -161,6 +161,7 @@ protected XPackLicenseState getLicenseState() { @Override public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java index 5e0e7a6314d67..af5791ac6efd1 100644 --- 
a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java @@ -53,7 +53,7 @@ public class EnrichCoordinatorProxyAction extends ActionType { public static final String NAME = "indices:data/read/xpack/enrich/coordinate_lookups"; private EnrichCoordinatorProxyAction() { - super(NAME, SearchResponse::new); + super(NAME); } public static class TransportAction extends HandledTransportAction { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorStatsAction.java index 1bd3b5c121c06..f40f14059772e 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorStatsAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorStatsAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; @@ -42,7 +41,7 @@ public class EnrichCoordinatorStatsAction extends ActionType { public static final EnrichReindexAction INSTANCE = new EnrichReindexAction(); private EnrichReindexAction() { - super(NAME, BulkByScrollResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java index 94e9033dcca4f..2aa30614b58f3 100644 --- 
a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java @@ -96,7 +96,7 @@ public class EnrichShardMultiSearchAction extends ActionType { @@ -303,22 +303,26 @@ private static BytesReference filterSource(FetchSourceContext fetchSourceContext private static SearchResponse createSearchResponse(TopDocs topDocs, SearchHit[] hits) { SearchHits searchHits = new SearchHits(hits, topDocs.totalHits, 0); - return new SearchResponse( - searchHits, - null, - null, - false, - null, - null, - 0, - null, - 1, - 1, - 0, - 1L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); + try { + return new SearchResponse( + searchHits, + null, + null, + false, + null, + null, + 0, + null, + 1, + 1, + 0, + 1L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + } finally { + searchHits.decRef(); + } } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java index e606f6ac8ea9c..769a86c5ec5b1 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java @@ -61,7 +61,7 @@ public class InternalExecutePolicyAction extends ActionType { public static final String NAME = "cluster:admin/xpack/enrich/internal_execute"; private InternalExecutePolicyAction() { - super(NAME, Response::new); + super(NAME); } public static class Request extends ExecuteEnrichPolicyAction.Request { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/LocalStateEnrich.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/LocalStateEnrich.java 
index 230c4712a1700..8eb30f2688143 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/LocalStateEnrich.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/LocalStateEnrich.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.enrich; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -19,6 +20,7 @@ import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; import java.nio.file.Path; import java.util.Collections; @@ -49,7 +51,7 @@ public EnrichTransportXPackInfoAction( } @Override - protected List infoActions() { + protected List> infoActions() { return Collections.singletonList(XPackInfoFeatureAction.ENRICH); } } diff --git a/x-pack/plugin/ent-search/build.gradle b/x-pack/plugin/ent-search/build.gradle index 92a1c007f72bf..4551011b03ca1 100644 --- a/x-pack/plugin/ent-search/build.gradle +++ b/x-pack/plugin/ent-search/build.gradle @@ -38,6 +38,13 @@ dependencies { module ':modules:search-business-rules' } +testClusters.configureEach { + testDistribution = 'DEFAULT' + setting 'xpack.security.enabled', 'true' + setting 'xpack.security.autoconfiguration.enabled', 'false' + user username: 'x_pack_rest_user', password: 'x-pack-test-password' +} + tasks.named("dependencyLicenses").configure { mapping from: /jackson.*/, to: 'jackson' } diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle index f92f1b6223fcc..b0f1e8bd026b0 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle +++ 
b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle @@ -30,10 +30,3 @@ BuildParams.bwcVersions.withWireCompatible(v -> v.after("8.8.0")) { bwcVersion, systemProperty("tests.old_cluster_version", bwcVersion) } } - - -testClusters.configureEach { - testDistribution = 'DEFAULT' - numberOfNodes = 1 - setting 'xpack.license.self_generated.type', 'trial' -} diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java similarity index 93% rename from x-pack/plugin/ent-search/qa/full-cluster-restart/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java rename to x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java index 30098871fb805..6b1b6fc886825 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java @@ -4,18 +4,11 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ - -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ package org.elasticsearch.xpack.application; import com.carrotsearch.randomizedtesting.annotations.Name; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -28,6 +21,8 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.Version.V_8_12_0; + public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { private static final Version DSL_DEFAULT_RETENTION_VERSION = V_8_12_0; @@ -53,11 +48,11 @@ protected ElasticsearchCluster getUpgradeCluster() { return cluster; } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104470") public void testBehavioralAnalyticsDataRetention() throws Exception { - assumeTrue( "Data retention changed by default to DSL in " + DSL_DEFAULT_RETENTION_VERSION, - getOldClusterTestVersion().before(DSL_DEFAULT_RETENTION_VERSION) + getOldClusterTestVersion().before(DSL_DEFAULT_RETENTION_VERSION.toString()) ); String legacyAnalyticsCollectionName = "oldstuff"; diff --git a/x-pack/plugin/ent-search/qa/rest/roles.yml b/x-pack/plugin/ent-search/qa/rest/roles.yml index 4d868f41e78b3..9dac14709db8d 100644 --- a/x-pack/plugin/ent-search/qa/rest/roles.yml +++ b/x-pack/plugin/ent-search/qa/rest/roles.yml @@ -16,6 +16,8 @@ user: cluster: - post_behavioral_analytics_event - manage_api_key + - read_connector_secrets + - write_connector_secrets indices: - names: [ "test-index1", diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml index 77d4f28721525..c7bc5f48a3d89 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml @@ -101,3 +101,17 @@ setup: service_type: super-connector - match: { result: 'updated' } + +--- +'Create Connector - Invalid Index Name': + - do: + catch: "bad_request" + connector.put: + connector_id: test-connector-recreating + body: + index_name: _this-is-invalid-index-name + name: my-connector + language: pl + is_native: false + service_type: super-connector + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml index 8d0fa14311608..9b7432adf290d 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml @@ -76,3 +76,15 @@ setup: - match: { custom_scheduling: {} } - match: { filtering.0.domain: DEFAULT } +--- +'Create Connector - Invalid Index Name': + - do: + catch: "bad_request" + connector.post: + body: + index_name: _this-is-invalid-index-name + name: my-connector + language: pl + is_native: false + service_type: super-connector + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml index 042fea7091f43..39b7b2d03e68f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml @@ -2,7 +2,7 @@ setup: - skip: version: " - 8.11.99" reason: Introduced in 8.12.0 - + features: is_after - do: connector.put: 
connector_id: test-connector @@ -25,6 +25,19 @@ setup: connector_id: test-connector - exists: last_seen + - set: { last_seen: last_seen_before_check_in } + + - do: + connector.check_in: + connector_id: test-connector + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - is_after: { last_seen: $last_seen_before_check_in } --- "Connector Check-in Error - Connector doesn't exist": diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml index f9989b615bef6..08bde123541ac 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml @@ -30,6 +30,24 @@ setup: - match: { last_sync_error: "oh no error" } - match: { last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" } +--- +"Update Connector Last Sync Stats - Supports different datetime format": + - do: + connector.last_sync: + connector_id: test-connector + body: + last_sync_error: "oh no error" + last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { last_sync_error: "oh no error" } + - match: { last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" } + --- "Update Connector Last Sync Stats - Connector doesn't exist": - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml index 
5a012853b4bf9..df4a640a0495d 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -72,8 +72,18 @@ setup: type: str ui_restrictions: [ ] validations: + - constraint: [123, 456, 789] + type: included_in + - constraint: ["string 1", "string 2", "string 3"] + type: included_in - constraint: 0 type: greater_than + - constraint: 42 + type: less_than + - constraint: int + type: list_type + - constraint: "\\d+" + type: regex value: 456 - match: { result: updated } @@ -84,6 +94,18 @@ setup: - match: { configuration.some_field.value: 456 } - match: { status: configured } + - match: { configuration.some_field.validations.0.constraint: [123, 456, 789] } + - match: { configuration.some_field.validations.0.type: included_in } + - match: { configuration.some_field.validations.1.constraint: ["string 1", "string 2", "string 3"] } + - match: { configuration.some_field.validations.1.type: included_in } + - match: { configuration.some_field.validations.2.constraint: 0 } + - match: { configuration.some_field.validations.2.type: greater_than } + - match: { configuration.some_field.validations.3.constraint: 42 } + - match: { configuration.some_field.validations.3.type: less_than } + - match: { configuration.some_field.validations.4.constraint: int } + - match: { configuration.some_field.validations.4.type: list_type } + - match: { configuration.some_field.validations.5.constraint: "\\d+" } + - match: { configuration.some_field.validations.5.type: regex } --- "Update Connector Configuration with null tooltip": diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml index 
6fe025b4ae002..bf1c7254b9b99 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml @@ -30,6 +30,23 @@ setup: - match: { name: test-name } +--- +"Update Connector Description": + - do: + connector.update_name: + connector_id: test-connector + body: + description: test-description + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { description: test-description } + --- "Update Connector Name and Description": - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/337_connector_update_service_type.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/337_connector_update_service_type.yml new file mode 100644 index 0000000000000..53cef9f5067b7 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/337_connector_update_service_type.yml @@ -0,0 +1,61 @@ +setup: + - skip: + version: " - 8.12.99" + reason: Introduced in 8.13.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Service Type": + - do: + connector.update_service_type: + connector_id: test-connector + body: + service_type: even-better-connector + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { service_type: even-better-connector } + - match: { status: created } + +--- +"Update Connector Service Type - 404 when connector doesn't exist": + - do: + catch: "missing" + connector.update_service_type: + connector_id: test-non-existent-connector + body: + service_type: even-better-connector + +--- +"Update 
Connector Service Type - 400 status code when connector_id is empty": + - do: + catch: "bad_request" + connector.update_service_type: + connector_id: "" + body: + service_type: even-better-connector + +--- +"Update Connector Service Type - 400 status code when payload is not string": + - do: + catch: "bad_request" + connector.update_service_type: + connector_id: test-connector + body: + service_type: + field_1: test + field_2: something diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/339_connector_update_native.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/339_connector_update_native.yml new file mode 100644 index 0000000000000..2cfed61272e91 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/339_connector_update_native.yml @@ -0,0 +1,77 @@ +setup: + - skip: + version: " - 8.12.99" + reason: Introduced in 8.13.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Native": + - do: + connector.update_native: + connector_id: test-connector + body: + is_native: true + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { is_native: true } + - match: { status: configured } + + - do: + connector.update_native: + connector_id: test-connector + body: + is_native: false + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { is_native: false } + - match: { status: configured } + +--- +"Update Connector Native - 404 when connector doesn't exist": + - do: + catch: "missing" + connector.update_native: + connector_id: test-non-existent-connector + body: + is_native: true + +--- +"Update Connector Native - 400 status code when connector_id is empty": + - do: + catch: 
"bad_request" + connector.update_native: + connector_id: "" + body: + is_native: true + +--- +"Update Connector Native - 400 status code when payload is not string": + - do: + catch: "bad_request" + connector.update_native: + connector_id: test-connector + body: + is_native: + field_1: test + field_2: something + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml index 582a523605663..5e3a733dce792 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml @@ -29,7 +29,7 @@ setup: connector_sync_job.get: connector_sync_job_id: $id - - match: { connector.id: test-connector} + - match: { connector.id: test-connector } - match: { job_type: full } - match: { trigger_method: on_demand } - match: { status: pending } @@ -41,6 +41,106 @@ setup: - exists: created_at - exists: last_seen +--- +'Create connector sync job with filtering': + - do: + connector.update_filtering: + connector_id: test-connector + body: + filtering: + - active: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: { } + rules: + - created_at: "2023-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-SYNC-JOB-TEST + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [ ] + state: valid + domain: DEFAULT + draft: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: { } + rules: + - created_at: "2023-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-0 + order: 0 + policy: include + rule: regex + updated_at: 
"2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [ ] + state: valid + - active: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: { } + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-1 + order: 0 + policy: include + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [ ] + state: valid + domain: TEST + draft: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: { } + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-1 + order: 0 + policy: exclude + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [ ] + state: valid + + - match: { result: updated } + + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + + - set: { id: id } + + - match: { id: $id } + + - do: + connector_sync_job.get: + connector_sync_job_id: $id + + - match: { connector.filtering.rules.0.id: RULE-ACTIVE-SYNC-JOB-TEST } + - match: { connector.filtering.rules.0.rule: regex } + - match: { connector.filtering.validation.state: valid } + - match: { connector.filtering.advanced_snippet.created_at: "2023-05-25T12:30:00.000Z" } + --- 'Create connector sync job with complex connector document': diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/500_connector_secret_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/500_connector_secret_post.yml new file mode 100644 index 0000000000000..6a4ee3ba7f6cb --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/500_connector_secret_post.yml @@ -0,0 +1,55 @@ +setup: + - skip: + version: " - 8.12.99" + reason: Introduced in 8.13.0 + +--- +'Post connector secret - admin': + - do: + 
connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + connector_secret.get: + id: $id + - match: { value: my-secret } + +--- +'Post connector secret - authorized user': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.get: + id: $id + - match: { value: my-secret } + +--- +'Post connector secret - unauthorized user': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged + connector_secret.post: + body: + value: my-secret + catch: unauthorized + +--- +'Post connector secret when id is missing should fail': + - do: + connector_secret.post: + body: + value: null + catch: bad_request diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml new file mode 100644 index 0000000000000..8fd676bb977b6 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml @@ -0,0 +1,60 @@ +setup: + - skip: + version: " - 8.12.99" + reason: Introduced in 8.13.0 + +--- +'Get connector secret - admin': + - do: + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + connector_secret.get: + id: $id + - match: { value: my-secret } + +--- +'Get connector secret - user with privileges': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + 
connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.get: + id: $id + - match: { value: my-secret } + +--- +'Get connector secret - user without privileges': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged + connector_secret.get: + id: $id + catch: unauthorized + +--- +'Get connector secret - Secret does not exist': + - do: + connector_secret.get: + id: non-existing-secret-id + catch: missing diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml new file mode 100644 index 0000000000000..ed50fc55a81e0 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml @@ -0,0 +1,71 @@ +setup: + - skip: + version: " - 8.12.99" + reason: Introduced in 8.13.0 + +--- +'Delete connector secret - admin': + - do: + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + + - do: + connector_secret.delete: + id: $id + - match: { deleted: true } + + - do: + connector_secret.get: + id: $id + catch: missing + +--- +'Delete connector secret - user with privileges': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - 
do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.delete: + id: $id + - match: { deleted: true } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.get: + id: $id + catch: missing + +--- +'Delete connector secret - user without privileges': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged + connector_secret.delete: + id: $id + catch: unauthorized + +--- +'Delete connector secret - Secret does not exist': + - do: + connector_secret.delete: + id: non-existing-secret-id + catch: missing diff --git a/x-pack/plugin/ent-search/src/javaRestTest/java/org/elasticsearch/xpack/entsearch/ConnectorSecretsSystemIndexIT.java b/x-pack/plugin/ent-search/src/javaRestTest/java/org/elasticsearch/xpack/entsearch/ConnectorSecretsSystemIndexIT.java new file mode 100644 index 0000000000000..730ad1d83a318 --- /dev/null +++ b/x-pack/plugin/ent-search/src/javaRestTest/java/org/elasticsearch/xpack/entsearch/ConnectorSecretsSystemIndexIT.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.entsearch; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class ConnectorSecretsSystemIndexIT extends ESRestTestCase { + + static final String BASIC_AUTH_VALUE = basicAuthHeaderValue( + "x_pack_rest_user", + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING + ); + + @Override + protected Settings restClientSettings() { + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE).build(); + } + + public void testConnectorSecretsCRUD() throws Exception { + // post secret + final String secretJson = getPostSecretJson(); + Request postRequest = new Request("POST", "/_connector/_secret/"); + postRequest.setJsonEntity(secretJson); + Response postResponse = client().performRequest(postRequest); + assertThat(postResponse.getStatusLine().getStatusCode(), is(200)); + Map responseMap = getResponseMap(postResponse); + assertThat(responseMap.size(), is(1)); + assertTrue(responseMap.containsKey("id")); + final String id = responseMap.get("id").toString(); + + // get secret + Request getRequest = new Request("GET", "/_connector/_secret/" + id); + Response getResponse = client().performRequest(getRequest); + assertThat(getResponse.getStatusLine().getStatusCode(), is(200)); + 
responseMap = getResponseMap(getResponse); + assertThat(responseMap.size(), is(2)); + assertTrue(responseMap.containsKey("id")); + assertTrue(responseMap.containsKey("value")); + assertThat(responseMap.get("value"), is("test secret")); + } + + public void testPostInvalidSecretBody() throws Exception { + Request postRequest = new Request("POST", "/_connector/_secret/"); + postRequest.setJsonEntity(""" + {"something":"else"}"""); + ResponseException re = expectThrows(ResponseException.class, () -> client().performRequest(postRequest)); + Response getResponse = re.getResponse(); + assertThat(getResponse.getStatusLine().getStatusCode(), is(400)); + } + + public void testGetNonExistingSecret() { + Request getRequest = new Request("GET", "/_connector/_secret/123"); + ResponseException re = expectThrows(ResponseException.class, () -> client().performRequest(getRequest)); + Response getResponse = re.getResponse(); + assertThat(getResponse.getStatusLine().getStatusCode(), is(404)); + } + + private String getPostSecretJson() throws IOException { + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.startObject(); + { + builder.field("value", "test secret"); + } + builder.endObject(); + return BytesReference.bytes(builder).utf8ToString(); + } + } + + private Map getResponseMap(Response response) throws IOException { + return XContentHelper.convertToMap(XContentType.JSON.xContent(), EntityUtils.toString(response.getEntity()), false); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/module-info.java b/x-pack/plugin/ent-search/src/main/java/module-info.java index d8cbceda4d8a3..5850b279f8b09 100644 --- a/x-pack/plugin/ent-search/src/main/java/module-info.java +++ b/x-pack/plugin/ent-search/src/main/java/module-info.java @@ -39,4 +39,6 @@ exports org.elasticsearch.xpack.application.connector.syncjob.action; provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.application.EnterpriseSearchFeatures; + + exports 
org.elasticsearch.xpack.application.connector.secrets.action to org.elasticsearch.server; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 73025c0b23b56..3933e7923d6b9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -58,8 +59,10 @@ import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorNameAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorNativeAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorServiceTypeAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; @@ -71,16 +74,31 @@ import 
org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorNameAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorNativeAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorServiceTypeAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNativeAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorServiceTypeAction; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsFeature; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsIndexService; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretAction; +import 
org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.RestDeleteConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.RestGetConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.RestPostConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.TransportDeleteConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.TransportGetConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.TransportPostConnectorSecretAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; @@ -232,8 +250,10 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), new ActionHandler<>(UpdateConnectorNameAction.INSTANCE, TransportUpdateConnectorNameAction.class), + new ActionHandler<>(UpdateConnectorNativeAction.INSTANCE, TransportUpdateConnectorNativeAction.class), new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), + new ActionHandler<>(UpdateConnectorServiceTypeAction.INSTANCE, TransportUpdateConnectorServiceTypeAction.class), // SyncJob API new ActionHandler<>(GetConnectorSyncJobAction.INSTANCE, TransportGetConnectorSyncJobAction.class), @@ -251,12 +271,23 @@ protected XPackLicenseState 
getLicenseState() { ); } + if (ConnectorSecretsFeature.isEnabled()) { + actionHandlers.addAll( + List.of( + new ActionHandler<>(DeleteConnectorSecretAction.INSTANCE, TransportDeleteConnectorSecretAction.class), + new ActionHandler<>(GetConnectorSecretAction.INSTANCE, TransportGetConnectorSecretAction.class), + new ActionHandler<>(PostConnectorSecretAction.INSTANCE, TransportPostConnectorSecretAction.class) + ) + ); + } + return Collections.unmodifiableList(actionHandlers); } @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -309,8 +340,10 @@ public List getRestHandlers( new RestUpdateConnectorLastSeenAction(), new RestUpdateConnectorLastSyncStatsAction(), new RestUpdateConnectorNameAction(), + new RestUpdateConnectorNativeAction(), new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), + new RestUpdateConnectorServiceTypeAction(), // SyncJob API new RestGetConnectorSyncJobAction(), @@ -325,6 +358,12 @@ public List getRestHandlers( ); } + if (ConnectorSecretsFeature.isEnabled()) { + restHandlers.addAll( + List.of(new RestGetConnectorSecretAction(), new RestPostConnectorSecretAction(), new RestDeleteConnectorSecretAction()) + ); + } + return Collections.unmodifiableList(restHandlers); } @@ -359,7 +398,15 @@ public Collection createComponents(PluginServices services) { @Override public Collection getSystemIndexDescriptors(Settings settings) { - return Arrays.asList(SearchApplicationIndexService.getSystemIndexDescriptor(), QueryRulesIndexService.getSystemIndexDescriptor()); + Collection systemIndices = new ArrayList<>( + List.of(SearchApplicationIndexService.getSystemIndexDescriptor(), QueryRulesIndexService.getSystemIndexDescriptor()) + ); + + if (ConnectorSecretsFeature.isEnabled()) { + systemIndices.add(ConnectorSecretsIndexService.getSystemIndexDescriptor()); + } + + return 
systemIndices; } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java index f80628c2f342f..43601ab1b2943 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java @@ -26,14 +26,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class DeleteAnalyticsCollectionAction extends ActionType { +public class DeleteAnalyticsCollectionAction { - public static final DeleteAnalyticsCollectionAction INSTANCE = new DeleteAnalyticsCollectionAction(); public static final String NAME = "cluster:admin/xpack/application/analytics/delete"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private DeleteAnalyticsCollectionAction() { - super(NAME, AcknowledgedResponse::readFrom); - } + private DeleteAnalyticsCollectionAction() {/* no instances */} public static class Request extends MasterNodeRequest implements ToXContentObject { private final String collectionName; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java index acabe85af51b5..f9eeb2cca6d2e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java @@ -28,14 +28,12 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class GetAnalyticsCollectionAction extends ActionType { +public class GetAnalyticsCollectionAction { - public static final GetAnalyticsCollectionAction INSTANCE = new GetAnalyticsCollectionAction(); public static final String NAME = "cluster:admin/xpack/application/analytics/get"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private GetAnalyticsCollectionAction() { - super(NAME, GetAnalyticsCollectionAction.Response::new); - } + private GetAnalyticsCollectionAction() {/* no instances */} public static class Request extends MasterNodeReadRequest implements ToXContentObject { private final String[] names; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java index 67599e565b816..7e5d05e9e222e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java @@ -36,15 +36,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class PostAnalyticsEventAction extends ActionType { - - public static final PostAnalyticsEventAction INSTANCE = new PostAnalyticsEventAction(); +public class PostAnalyticsEventAction { public static final String NAME = "cluster:admin/xpack/application/analytics/post_event"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private PostAnalyticsEventAction() { - 
super(NAME, Response::readFromStreamInput); - } + private PostAnalyticsEventAction() {/* no instances */} public static class Request extends ActionRequest implements AnalyticsEvent.Context, ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 6b051a4104873..8a544f735b570 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -25,14 +25,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class PutAnalyticsCollectionAction extends ActionType { +public class PutAnalyticsCollectionAction { - public static final PutAnalyticsCollectionAction INSTANCE = new PutAnalyticsCollectionAction(); public static final String NAME = "cluster:admin/xpack/application/analytics/put"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public PutAnalyticsCollectionAction() { - super(NAME, PutAnalyticsCollectionAction.Response::new); - } + private PutAnalyticsCollectionAction() {/* no instances */} public static class Request extends MasterNodeRequest implements ToXContentObject { private final String name; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 74d9be8db0fac..fdbf27929789f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -22,6 +22,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; import java.io.IOException; import java.time.Instant; @@ -47,6 +48,7 @@ *
  • An error string capturing the latest error encountered during the connector's operation, if any.
  • *
  • A {@link ConnectorFeatures} object encapsulating the set of features enabled for this connector.
  • *
  • A list of {@link ConnectorFiltering} objects for applying filtering rules to the data processed by the connector.
  • + *
  • An optional {@link FilteringRules} object that represents active filtering rules applied to a sync job.
  • *
  • The name of the Elasticsearch index where the synchronized data is stored or managed.
  • *
  • A boolean flag 'isNative' indicating whether the connector is a native Elasticsearch connector.
  • *
  • The language associated with the connector.
  • @@ -79,6 +81,8 @@ public class Connector implements NamedWriteable, ToXContentObject { private final ConnectorFeatures features; private final List filtering; @Nullable + private final FilteringRules syncJobFiltering; + @Nullable private final String indexName; private final boolean isNative; @Nullable @@ -110,6 +114,7 @@ public class Connector implements NamedWriteable, ToXContentObject { * @param error Information about the last error encountered by the connector, if any. * @param features Features enabled for the connector. * @param filtering Filtering settings applied by the connector. + * @param syncJobFiltering Filtering settings used by a sync job, it contains subset of data from 'filtering'. * @param indexName Name of the index associated with the connector. * @param isNative Flag indicating whether the connector is a native type. * @param language The language supported by the connector. @@ -132,6 +137,7 @@ private Connector( String error, ConnectorFeatures features, List filtering, + FilteringRules syncJobFiltering, String indexName, boolean isNative, String language, @@ -147,12 +153,13 @@ private Connector( ) { this.connectorId = connectorId; this.apiKeyId = apiKeyId; - this.configuration = Objects.requireNonNull(configuration, "[configuration] cannot be null"); - this.customScheduling = Objects.requireNonNull(customScheduling, "[custom_scheduling] cannot be null"); + this.configuration = configuration; + this.customScheduling = customScheduling; this.description = description; this.error = error; this.features = features; - this.filtering = Objects.requireNonNull(filtering, "[filtering] cannot be null"); + this.filtering = filtering; + this.syncJobFiltering = syncJobFiltering; this.indexName = indexName; this.isNative = isNative; this.language = language; @@ -160,9 +167,9 @@ private Connector( this.syncInfo = syncInfo; this.name = name; this.pipeline = pipeline; - this.scheduling = Objects.requireNonNull(scheduling, "[scheduling] cannot be null"); + 
this.scheduling = scheduling; this.serviceType = serviceType; - this.status = Objects.requireNonNull(status, "[status] cannot be null"); + this.status = status; this.syncCursor = syncCursor; this.syncNow = syncNow; } @@ -176,6 +183,7 @@ public Connector(StreamInput in) throws IOException { this.error = in.readOptionalString(); this.features = in.readOptionalWriteable(ConnectorFeatures::new); this.filtering = in.readOptionalCollectionAsList(ConnectorFiltering::new); + this.syncJobFiltering = in.readOptionalWriteable(FilteringRules::new); this.indexName = in.readOptionalString(); this.isNative = in.readBoolean(); this.language = in.readOptionalString(); @@ -199,7 +207,7 @@ public Connector(StreamInput in) throws IOException { static final ParseField FEATURES_FIELD = new ParseField("features"); public static final ParseField FILTERING_FIELD = new ParseField("filtering"); public static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); - static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); + public static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); public static final ParseField LANGUAGE_FIELD = new ParseField("language"); public static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); public static final ParseField NAME_FIELD = new ParseField("name"); @@ -273,7 +281,7 @@ public Connector(StreamInput in) throws IOException { PARSER.declareStringOrNull(optionalConstructorArg(), LANGUAGE_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant(p, Connector.LAST_SEEN_FIELD.getPreferredName()), Connector.LAST_SEEN_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -281,7 +289,10 @@ public Connector(StreamInput in) throws IOException { PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant( + p, + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName() + ), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -294,7 +305,7 @@ public Connector(StreamInput in) throws IOException { PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName()), ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -302,7 +313,7 @@ public Connector(StreamInput in) throws IOException { PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName()), ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -314,7 +325,7 @@ public Connector(StreamInput in) throws IOException { ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_SYNCED_FIELD.getPreferredName()), ConnectorSyncInfo.LAST_SYNCED_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -388,6 +399,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(error); out.writeOptionalWriteable(features); out.writeOptionalCollection(filtering); + out.writeOptionalWriteable(syncJobFiltering); out.writeOptionalString(indexName); out.writeBoolean(isNative); out.writeOptionalString(language); @@ -434,6 +446,10 @@ public List getFiltering() { return filtering; } + public FilteringRules getSyncJobFiltering() { + return syncJobFiltering; + } + public String getIndexName() { return indexName; } @@ -497,6 +513,7 @@ public boolean equals(Object o) { && Objects.equals(error, connector.error) && Objects.equals(features, connector.features) && Objects.equals(filtering, connector.filtering) + && Objects.equals(syncJobFiltering, connector.syncJobFiltering) && Objects.equals(indexName, connector.indexName) && Objects.equals(language, connector.language) && Objects.equals(lastSeen, connector.lastSeen) @@ -520,6 +537,7 @@ public int hashCode() { error, features, filtering, + syncJobFiltering, indexName, isNative, language, @@ -549,19 +567,20 @@ public static class Builder { private String description; private String error; private ConnectorFeatures features; - private List filtering = List.of(ConnectorFiltering.getDefaultConnectorFilteringConfig()); + private List filtering; + private FilteringRules 
syncJobFiltering; private String indexName; - private boolean isNative = false; + private boolean isNative; private String language; private Instant lastSeen; private ConnectorSyncInfo syncInfo = new ConnectorSyncInfo.Builder().build(); private String name; private ConnectorIngestPipeline pipeline; - private ConnectorScheduling scheduling = ConnectorScheduling.getDefaultConnectorScheduling(); + private ConnectorScheduling scheduling; private String serviceType; private ConnectorStatus status = ConnectorStatus.CREATED; private Object syncCursor; - private boolean syncNow = false; + private boolean syncNow; public Builder setConnectorId(String connectorId) { this.connectorId = connectorId; @@ -603,6 +622,11 @@ public Builder setFiltering(List filtering) { return this; } + public Builder setSyncJobFiltering(FilteringRules syncJobFiltering) { + this.syncJobFiltering = syncJobFiltering; + return this; + } + public Builder setIndexName(String indexName) { this.indexName = indexName; return this; @@ -610,9 +634,6 @@ public Builder setIndexName(String indexName) { public Builder setIsNative(boolean isNative) { this.isNative = isNative; - if (isNative) { - this.status = ConnectorStatus.NEEDS_CONFIGURATION; - } return this; } @@ -632,7 +653,7 @@ public Builder setSyncInfo(ConnectorSyncInfo syncInfo) { } public Builder setName(String name) { - this.name = Objects.requireNonNullElse(name, ""); + this.name = name; return this; } @@ -676,6 +697,7 @@ public Connector build() { error, features, filtering, + syncJobFiltering, indexName, isNative, language, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java index 81239610c3186..7badf6926c574 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java @@ -101,8 +101,8 @@ public ConnectorCustomSchedule(StreamInput in) throws IOException { PARSER.declareString(constructorArg(), INTERVAL_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), - ConnectorSyncInfo.LAST_SYNCED_FIELD, + (p, c) -> ConnectorUtils.parseNullableInstant(p, LAST_SYNCED_FIELD.getPreferredName()), + LAST_SYNCED_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareString(constructorArg(), NAME_FIELD); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java index 8ade6cdbcc0b1..62a8a68cea5ca 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java @@ -66,6 +66,18 @@ public ConnectorFiltering(StreamInput in) throws IOException { this.draft = new FilteringRules(in); } + public FilteringRules getActive() { + return active; + } + + public String getDomain() { + return domain; + } + + public FilteringRules getDraft() { + return draft; + } + private static final ParseField ACTIVE_FIELD = new ParseField("active"); private static final ParseField DOMAIN_FIELD = new ParseField("domain"); private static final ParseField DRAFT_FIELD = new ParseField("draft"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 8a1b336bfa1e3..d92074dacc129 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -32,19 +32,23 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; +import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; -import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNativeAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorServiceTypeAction; +import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -67,16 +71,25 @@ public ConnectorIndexService(Client client) { } /** - * Creates or updates the {@link Connector} in the underlying index. + * Creates or updates the {@link Connector} in the underlying index with a specific doc ID. * - * @param docId The ID of the connector. 
- * @param connector The connector object. + * @param request Request for creating the connector. * @param listener The action listener to invoke on response/failure. */ - public void putConnector(String docId, Connector connector, ActionListener listener) { + public void createConnectorWithDocId(PutConnectorAction.Request request, ActionListener listener) { + + Connector connector = createConnectorWithDefaultValues( + request.getDescription(), + request.getIndexName(), + request.getIsNative(), + request.getLanguage(), + request.getName(), + request.getServiceType() + ); + try { final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(docId) + .id(request.getConnectorId()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); clientWithOrigin.index(indexRequest, listener); @@ -86,13 +99,25 @@ public void putConnector(String docId, Connector connector, ActionListener listener) { + public void createConnectorWithAutoGeneratedId( + PostConnectorAction.Request request, + ActionListener listener + ) { + + Connector connector = createConnectorWithDefaultValues( + request.getDescription(), + request.getIndexName(), + request.getIsNative(), + request.getLanguage(), + request.getName(), + request.getServiceType() + ); + try { final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -107,6 +132,43 @@ public void postConnector(Connector connector, ActionListener listener) { try { String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) @@ -315,7 +378,7 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ new 
IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .source(Map.of(Connector.FILTERING_FIELD.getPreferredName(), request.getFiltering())) ); clientWithOrigin.update( updateRequest, @@ -335,17 +398,16 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ /** * Updates the lastSeen property of a {@link Connector}. * - * @param request The request for updating the connector's lastSeen status. - * @param listener The listener for handling responses, including successful updates or errors. + * @param connectorId The id of the connector object. + * @param listener The listener for handling responses, including successful updates or errors. */ - public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + public void checkInConnector(String connectorId, ActionListener listener) { try { - String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .source(Map.of(Connector.LAST_SEEN_FIELD.getPreferredName(), Instant.now())) ); clientWithOrigin.update( updateRequest, @@ -392,6 +454,47 @@ public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Requ } } + /** + * Updates the is_native property of a {@link Connector}. It always sets the {@link ConnectorStatus} to + * CONFIGURED. + * + * @param request The request for updating the connector's is_native property. + * @param listener The listener for handling responses, including successful updates or errors. 
+ */ + public void updateConnectorNative(UpdateConnectorNativeAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source( + Map.of( + Connector.IS_NATIVE_FIELD.getPreferredName(), + request.isNative(), + Connector.STATUS_FIELD.getPreferredName(), + ConnectorStatus.CONFIGURED + ) + ) + + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. 
* @@ -405,6 +508,7 @@ public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request reques new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(Map.of(Connector.PIPELINE_FIELD.getPreferredName(), request.getPipeline())) .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) ); clientWithOrigin.update( @@ -435,7 +539,7 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .source(Map.of(Connector.SCHEDULING_FIELD.getPreferredName(), request.getScheduling())) ); clientWithOrigin.update( updateRequest, @@ -452,6 +556,52 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } } + /** + * Updates the service type property of a {@link Connector} and its {@link ConnectorStatus}. + * + * @param request The request for updating the connector's service type. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorServiceType(UpdateConnectorServiceTypeAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + getConnector(connectorId, listener.delegateFailure((l, connector) -> { + + ConnectorStatus prevStatus = connector.getStatus(); + ConnectorStatus newStatus = prevStatus == ConnectorStatus.CREATED + ? 
ConnectorStatus.CREATED + : ConnectorStatus.NEEDS_CONFIGURATION; + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source( + Map.of( + Connector.SERVICE_TYPE_FIELD.getPreferredName(), + request.getServiceType(), + Connector.STATUS_FIELD.getPreferredName(), + newStatus + ) + ) + + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (updateListener, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + updateListener.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + updateListener.onResponse(updateResponse); + }) + ); + })); + } catch (Exception e) { + listener.onFailure(e); + } + } + private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorStateMachine.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorStateMachine.java new file mode 100644 index 0000000000000..21bfdbc06ec3c --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorStateMachine.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.Map; +import java.util.Set; + +/** + * The {@link ConnectorStateMachine} class manages state transitions for connectors + * in accordance with the Connector Protocol. + * It defines valid transitions between different connector states and provides a method to validate these transitions. + */ +public class ConnectorStateMachine { + + private static final Map> VALID_TRANSITIONS = Map.of( + ConnectorStatus.CREATED, + EnumSet.of(ConnectorStatus.NEEDS_CONFIGURATION, ConnectorStatus.ERROR), + ConnectorStatus.NEEDS_CONFIGURATION, + EnumSet.of(ConnectorStatus.CONFIGURED), + ConnectorStatus.CONFIGURED, + EnumSet.of(ConnectorStatus.NEEDS_CONFIGURATION, ConnectorStatus.CONNECTED, ConnectorStatus.ERROR), + ConnectorStatus.CONNECTED, + EnumSet.of(ConnectorStatus.CONFIGURED, ConnectorStatus.ERROR), + ConnectorStatus.ERROR, + EnumSet.of(ConnectorStatus.CONNECTED, ConnectorStatus.CONFIGURED) + ); + + /** + * Checks if a transition from one connector state to another is valid. + * + * @param current The current state of the connector. + * @param next The proposed next state of the connector. + */ + public static boolean isValidTransition(ConnectorStatus current, ConnectorStatus next) { + return VALID_TRANSITIONS.getOrDefault(current, Collections.emptySet()).contains(next); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorUtils.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorUtils.java new file mode 100644 index 0000000000000..2c9f25b87afdb --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorUtils.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.common.time.TimeUtils; + +import java.io.IOException; +import java.time.Instant; + +public class ConnectorUtils { + + /** + * Parses a field from the XContentParser to an Instant. This method should be used for parsing + * all datetime fields related to Connector APIs. It utilizes the parseTimeFieldToInstant method from {@link TimeUtils} + * to parse the date-time string to an Instant. + * + * @param p the XContentParser instance from which to parse the date-time string. + * @param fieldName the name of the field whose value is to be parsed. + */ + public static Instant parseInstant(XContentParser p, String fieldName) throws IOException { + return TimeUtils.parseTimeFieldToInstant(p, fieldName); + } + + /** + * Parses a nullable field from the XContentParser to an Instant. This method is useful + * when parsing datetime fields that might have null values. + * + * @param p the XContentParser instance from which to parse the date-time string. + * @param fieldName the name of the field whose value is to be parsed. + */ + public static Instant parseNullableInstant(XContentParser p, String fieldName) throws IOException { + return p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : parseInstant(p, fieldName); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java index fab57921772d9..a3fb7482f45dc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java @@ -26,14 +26,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class DeleteConnectorAction extends ActionType { +public class DeleteConnectorAction { - public static final DeleteConnectorAction INSTANCE = new DeleteConnectorAction(); public static final String NAME = "cluster:admin/xpack/connector/delete"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private DeleteConnectorAction() { - super(NAME, AcknowledgedResponse::readFrom); - } + private DeleteConnectorAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java index 9d97b6787c243..88eacc8f437b4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java @@ -27,14 +27,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class GetConnectorAction extends ActionType { +public class GetConnectorAction { - public static final GetConnectorAction INSTANCE = new GetConnectorAction(); public static final String NAME = "cluster:admin/xpack/connector/get"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private GetConnectorAction() { - super(NAME, GetConnectorAction.Response::new); - } + private GetConnectorAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java index 70cee8b064c71..3b286569ce881 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java @@ -28,14 +28,12 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class ListConnectorAction extends ActionType { +public class ListConnectorAction { - public static final ListConnectorAction INSTANCE = new ListConnectorAction(); public static final String NAME = "cluster:admin/xpack/connector/list"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public ListConnectorAction() { - super(NAME, ListConnectorAction.Response::new); - } + private ListConnectorAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java index 947c2f63d4950..18907ad764e49 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java @@ -12,11 +12,14 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -29,17 +32,16 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class PostConnectorAction extends ActionType { +public class PostConnectorAction { - public static final PostConnectorAction INSTANCE = new PostConnectorAction(); public static final String NAME = "cluster:admin/xpack/connector/post"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public PostConnectorAction() { - super(NAME, PostConnectorAction.Response::new); - } + private PostConnectorAction() {/* no instances */} public static class Request extends 
ActionRequest implements ToXContentObject { @@ -135,7 +137,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(getIndexName())) { + validationException = addValidationError("[index_name] cannot be [null] or [\"\"]", validationException); + } + try { + MetadataCreateIndexService.validateIndexOrAliasName(getIndexName(), InvalidIndexNameException::new); + } catch (InvalidIndexNameException e) { + validationException = addValidationError(e.toString(), validationException); + } + + return validationException; } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java index 592be3a6b37ab..1e53b62967a06 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java @@ -13,12 +13,14 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import 
org.elasticsearch.xcontent.ParseField; @@ -34,14 +36,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class PutConnectorAction extends ActionType { +public class PutConnectorAction { - public static final PutConnectorAction INSTANCE = new PutConnectorAction(); public static final String NAME = "cluster:admin/xpack/connector/put"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public PutConnectorAction() { - super(NAME, PutConnectorAction.Response::new); - } + private PutConnectorAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -154,7 +154,15 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(getConnectorId())) { - validationException = addValidationError("connector_id cannot be null or empty", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"]", validationException); + } + if (Strings.isNullOrEmpty(getIndexName())) { + validationException = addValidationError("[index_name] cannot be [null] or [\"\"]", validationException); + } + try { + MetadataCreateIndexService.validateIndexOrAliasName(getIndexName(), InvalidIndexNameException::new); + } catch (InvalidIndexNameException e) { + validationException = addValidationError(e.toString(), validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNativeAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNativeAction.java new file mode 100644 index 0000000000000..464d682567043 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNativeAction.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +@ServerlessScope(Scope.PUBLIC) +public class RestUpdateConnectorNativeAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_native_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_native")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorNativeAction.Request request = UpdateConnectorNativeAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorNativeAction.INSTANCE, + request, + new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorServiceTypeAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorServiceTypeAction.java new file mode 100644 index 0000000000000..89c3303f8cc94 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorServiceTypeAction.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +@ServerlessScope(Scope.PUBLIC) +public class RestUpdateConnectorServiceTypeAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_service_type_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_service_type")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorServiceTypeAction.Request request = UpdateConnectorServiceTypeAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorServiceTypeAction.INSTANCE, + request, + new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status) + ); 
+ } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java index 7b66ca81a77f9..3d3592c706941 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java @@ -16,11 +16,8 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; -import java.util.Objects; - public class TransportPostConnectorAction extends HandledTransportAction { protected final ConnectorIndexService connectorIndexService; @@ -44,17 +41,6 @@ public TransportPostConnectorAction( @Override protected void doExecute(Task task, PostConnectorAction.Request request, ActionListener listener) { - - Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); - - Connector connector = new Connector.Builder().setDescription(request.getDescription()) - .setIndexName(request.getIndexName()) - .setIsNative(isNative) - .setLanguage(request.getLanguage()) - .setName(request.getName()) - .setServiceType(request.getServiceType()) - .build(); - - connectorIndexService.postConnector(connector, listener); + connectorIndexService.createConnectorWithAutoGeneratedId(request, listener); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java index 
8f4ac53b03bbd..c8c1dfed059c8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java @@ -16,11 +16,8 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; -import java.util.Objects; - public class TransportPutConnectorAction extends HandledTransportAction { protected final ConnectorIndexService connectorIndexService; @@ -44,21 +41,6 @@ public TransportPutConnectorAction( @Override protected void doExecute(Task task, PutConnectorAction.Request request, ActionListener listener) { - - Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); - - Connector connector = new Connector.Builder().setDescription(request.getDescription()) - .setIndexName(request.getIndexName()) - .setIsNative(isNative) - .setLanguage(request.getLanguage()) - .setName(request.getName()) - .setServiceType(request.getServiceType()) - .build(); - - connectorIndexService.putConnector( - request.getConnectorId(), - connector, - listener.map(r -> new PutConnectorAction.Response(r.getResult())) - ); + connectorIndexService.createConnectorWithDocId(request, listener.map(r -> new PutConnectorAction.Response(r.getResult()))); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java index 60c75bce8314a..c1ac90a8b018d 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java @@ -47,6 +47,9 @@ protected void doExecute( UpdateConnectorLastSeenAction.Request request, ActionListener listener ) { - connectorIndexService.updateConnectorLastSeen(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult()))); + connectorIndexService.checkInConnector( + request.getConnectorId(), + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNativeAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNativeAction.java new file mode 100644 index 0000000000000..cc93b3dafa708 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNativeAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorNativeAction extends HandledTransportAction< + UpdateConnectorNativeAction.Request, + ConnectorUpdateActionResponse> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorNativeAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorNativeAction.NAME, + transportService, + actionFilters, + UpdateConnectorNativeAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorNativeAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorNative(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult()))); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorServiceTypeAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorServiceTypeAction.java new file mode 100644 index 0000000000000..b336584a8a5c9 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorServiceTypeAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorServiceTypeAction extends HandledTransportAction< + UpdateConnectorServiceTypeAction.Request, + ConnectorUpdateActionResponse> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorServiceTypeAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorServiceTypeAction.NAME, + transportService, + actionFilters, + UpdateConnectorServiceTypeAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorServiceTypeAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorServiceType(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult()))); + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java index 19e7628746485..623932018bbb3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java @@ -34,14 +34,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class UpdateConnectorConfigurationAction extends ActionType { +public class UpdateConnectorConfigurationAction { - public static final UpdateConnectorConfigurationAction INSTANCE = new UpdateConnectorConfigurationAction(); public static final String NAME = "cluster:admin/xpack/connector/update_configuration"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorConfigurationAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorConfigurationAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -72,11 +70,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (Objects.isNull(configuration)) { - validationException = addValidationError("[configuration] cannot be null.", validationException); + validationException = 
addValidationError("[configuration] cannot be [null].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java index ad2036ecbaf81..dceb4d99e2be8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java @@ -31,14 +31,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class UpdateConnectorErrorAction extends ActionType { +public class UpdateConnectorErrorAction { - public static final UpdateConnectorErrorAction INSTANCE = new UpdateConnectorErrorAction(); public static final String NAME = "cluster:admin/xpack/connector/update_error"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorErrorAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorErrorAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -71,7 +69,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } return validationException; diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java index dabb87f2afc22..044789395e82b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java @@ -32,14 +32,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class UpdateConnectorFilteringAction extends ActionType { +public class UpdateConnectorFilteringAction { - public static final UpdateConnectorFilteringAction INSTANCE = new UpdateConnectorFilteringAction(); public static final String NAME = "cluster:admin/xpack/connector/update_filtering"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorFilteringAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorFilteringAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -70,11 +68,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (filtering == null) { - validationException = addValidationError("[filtering] cannot be null.", validationException); + validationException = addValidationError("[filtering] cannot be [null].", validationException); } return 
validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java index bd20513e47033..f872d8ec1cf5d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java @@ -18,35 +18,28 @@ import org.elasticsearch.xpack.application.connector.Connector; import java.io.IOException; -import java.time.Instant; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class UpdateConnectorLastSeenAction extends ActionType { +public class UpdateConnectorLastSeenAction { - public static final UpdateConnectorLastSeenAction INSTANCE = new UpdateConnectorLastSeenAction(); public static final String NAME = "cluster:admin/xpack/connector/update_last_seen"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorLastSeenAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorLastSeenAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final String connectorId; - private final Instant lastSeen; - public Request(String connectorId) { this.connectorId = connectorId; - this.lastSeen = Instant.now(); } public Request(StreamInput in) throws IOException { super(in); this.connectorId = in.readString(); - this.lastSeen = in.readInstant(); } public String getConnectorId() { @@ -58,7 +51,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = 
addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } return validationException; @@ -68,7 +61,7 @@ public ActionRequestValidationException validate() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(Connector.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + builder.field(Connector.ID_FIELD.getPreferredName(), connectorId); } builder.endObject(); return builder; @@ -78,7 +71,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(connectorId); - out.writeInstant(lastSeen); } @Override @@ -86,12 +78,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(connectorId, request.connectorId) && Objects.equals(lastSeen, request.lastSeen); + return Objects.equals(connectorId, request.connectorId); } @Override public int hashCode() { - return Objects.hash(connectorId, lastSeen); + return Objects.hash(connectorId); } } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java index 7d82c28ca4af1..2ec8740a0c457 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.xcontent.XContentType; import 
org.elasticsearch.xpack.application.connector.ConnectorSyncInfo; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.ConnectorUtils; import java.io.IOException; import java.time.Instant; @@ -33,14 +34,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class UpdateConnectorLastSyncStatsAction extends ActionType { +public class UpdateConnectorLastSyncStatsAction { - public static final UpdateConnectorLastSyncStatsAction INSTANCE = new UpdateConnectorLastSyncStatsAction(); public static final String NAME = "cluster:admin/xpack/connector/update_last_sync_stats"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorLastSyncStatsAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorLastSyncStatsAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -72,7 +71,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } return validationException; @@ -101,7 +100,10 @@ public ActionRequestValidationException validate() { PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant( + p, + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName() + ), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -114,7 +116,10 @@ public ActionRequestValidationException validate() { PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant( + p, + ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName() + ), ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -122,7 +127,7 @@ public ActionRequestValidationException validate() { PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName()), ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -134,7 +139,7 @@ public ActionRequestValidationException validate() { ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_SYNCED_FIELD.getPreferredName()), ConnectorSyncInfo.LAST_SYNCED_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java index 6b5c580e396ad..0d79fb416b649 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java @@ -29,17 +29,14 @@ import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class UpdateConnectorNameAction extends ActionType { +public class UpdateConnectorNameAction { - public static final UpdateConnectorNameAction INSTANCE = new UpdateConnectorNameAction(); public static final String NAME = "cluster:admin/xpack/connector/update_name"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorNameAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorNameAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -82,10 +79,13 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot 
be [null] or [\"\"].", validationException); } - if (Strings.isNullOrEmpty(name)) { - validationException = addValidationError("[name] cannot be null or empty.", validationException); + if (name == null && description == null) { + validationException = addValidationError( + "[name] and [description] cannot both be [null]. Please provide a value for at least one of them.", + validationException + ); } return validationException; @@ -98,7 +98,7 @@ public ActionRequestValidationException validate() { ); static { - PARSER.declareStringOrNull(constructorArg(), Connector.NAME_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), Connector.NAME_FIELD); PARSER.declareStringOrNull(optionalConstructorArg(), Connector.DESCRIPTION_FIELD); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNativeAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNativeAction.java new file mode 100644 index 0000000000000..bcb5c07b91ff2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNativeAction.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class UpdateConnectorNativeAction { + + public static final String NAME = "cluster:admin/xpack/connector/update_native"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private UpdateConnectorNativeAction() {/* no instances */} + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + private final boolean isNative; + + public Request(String connectorId, boolean indexName) { + this.connectorId = connectorId; + this.isNative = indexName; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.isNative = in.readBoolean(); + } + + public String getConnectorId() { + return connectorId; + } + + public boolean isNative() { + return isNative; + } + + private static 
final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_update_native_request", + false, + ((args, connectorId) -> new UpdateConnectorNativeAction.Request(connectorId, (boolean) args[0])) + ); + + static { + PARSER.declareBoolean(constructorArg(), Connector.IS_NATIVE_FIELD); + } + + public static UpdateConnectorNativeAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorNativeAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorNativeAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.IS_NATIVE_FIELD.getPreferredName(), isNative); + } + builder.endObject(); + return builder; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeBoolean(isNative); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return isNative == request.isNative && Objects.equals(connectorId, request.connectorId); + } + + 
@Override + public int hashCode() { + return Objects.hash(connectorId, isNative); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java index ba5b0e702bf0e..8d77fbb30e7f7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java @@ -31,14 +31,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class UpdateConnectorPipelineAction extends ActionType { +public class UpdateConnectorPipelineAction { - public static final UpdateConnectorPipelineAction INSTANCE = new UpdateConnectorPipelineAction(); public static final String NAME = "cluster:admin/xpack/connector/update_pipeline"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorPipelineAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorPipelineAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -69,11 +67,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (Objects.isNull(pipeline)) { - validationException = addValidationError("[pipeline] cannot be null.", validationException); + validationException = 
addValidationError("[pipeline] cannot be [null].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java index df76e9a09547a..96bef49dd5e06 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java @@ -31,14 +31,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class UpdateConnectorSchedulingAction extends ActionType { +public class UpdateConnectorSchedulingAction { - public static final UpdateConnectorSchedulingAction INSTANCE = new UpdateConnectorSchedulingAction(); public static final String NAME = "cluster:admin/xpack/connector/update_scheduling"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorSchedulingAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorSchedulingAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { @@ -69,11 +67,11 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorId)) { - validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); } if (Objects.isNull(scheduling)) { - validationException = addValidationError("[scheduling] cannot be null.", 
validationException); + validationException = addValidationError("[scheduling] cannot be [null].", validationException); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorServiceTypeAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorServiceTypeAction.java new file mode 100644 index 0000000000000..0d8977b101046 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorServiceTypeAction.java @@ -0,0 +1,136 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import 
static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class UpdateConnectorServiceTypeAction { + + public static final String NAME = "cluster:admin/xpack/connector/update_service_type"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private UpdateConnectorServiceTypeAction() {/* no instances */} + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + private final String serviceType; + + public Request(String connectorId, String serviceType) { + this.connectorId = connectorId; + this.serviceType = serviceType; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.serviceType = in.readString(); + } + + public String getConnectorId() { + return connectorId; + } + + public String getServiceType() { + return serviceType; + } + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "connector_update_service_type_request", + false, + ((args, connectorId) -> new UpdateConnectorServiceTypeAction.Request(connectorId, (String) args[0])) + ); + + static { + PARSER.declareString(constructorArg(), Connector.SERVICE_TYPE_FIELD); + } + + public static UpdateConnectorServiceTypeAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorServiceTypeAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorServiceTypeAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder 
builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), serviceType); + } + builder.endObject(); + return builder; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); + } + + if (Strings.isNullOrEmpty(serviceType)) { + validationException = addValidationError("[service_type] cannot be [null] or [\"\"].", validationException); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeString(serviceType); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(serviceType, request.serviceType); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, serviceType); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java index 476ae113398dc..8f05e67ecb14d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java @@ -36,7 +36,7 @@ public class ConfigurationValidation implements Writeable, ToXContentObject { * Constructs a new ConfigurationValidation instance with specified 
constraint and type. * This constructor initializes the object with a given validation constraint and its associated validation type. * - * @param constraint The validation constraint, represented as an Object. + * @param constraint The validation constraint (string, number or list), represented as generic Object type. * @param type The type of configuration validation, specified as an instance of {@link ConfigurationValidationType}. */ private ConfigurationValidation(Object constraint, ConfigurationValidationType type) { @@ -59,14 +59,12 @@ public ConfigurationValidation(StreamInput in) throws IOException { ); static { - PARSER.declareField(constructorArg(), (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return p.text(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.numberValue(); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, CONSTRAINT_FIELD, ObjectParser.ValueType.VALUE); + PARSER.declareField( + constructorArg(), + (p, c) -> parseConstraintValue(p), + CONSTRAINT_FIELD, + ObjectParser.ValueType.VALUE_OBJECT_ARRAY + ); PARSER.declareField( constructorArg(), (p, c) -> ConfigurationValidationType.validationType(p.text()), @@ -75,6 +73,22 @@ public ConfigurationValidation(StreamInput in) throws IOException { ); } + /** + * Parses the value of a constraint from the XContentParser stream. + * This method is designed to handle various types of constraint values as per the connector's protocol original specification. + * The constraints can be of type string, number, or list of values. 
+ */ + private static Object parseConstraintValue(XContentParser p) throws IOException { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.START_ARRAY) { + return p.list(); + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java index 2118014f4a286..7c064014a95ba 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java @@ -14,8 +14,7 @@ public enum ConfigurationValidationType { GREATER_THAN, LIST_TYPE, INCLUDED_IN, - REGEX, - UNSET; + REGEX; @Override public String toString() { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java index ca7d3bfa6d9c8..480eaf91bb23b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorUtils; import java.io.IOException; import java.time.Instant; @@ -71,8 +72,18 @@ public FilteringAdvancedSnippet(StreamInput in) throws IOException { ); static { - PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), CREATED_AT_FIELD, ObjectParser.ValueType.STRING); - PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), UPDATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorUtils.parseInstant(p, CREATED_AT_FIELD.getPreferredName()), + CREATED_AT_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorUtils.parseInstant(p, UPDATED_AT_FIELD.getPreferredName()), + UPDATED_AT_FIELD, + ObjectParser.ValueType.STRING + ); PARSER.declareField(constructorArg(), (p, c) -> p.map(), VALUE_FIELD, ObjectParser.ValueType.OBJECT); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java index cfcc639b8b613..02571078f4e21 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorUtils; import java.io.IOException; import java.time.Instant; @@ -108,7 +109,12 @@ public FilteringRule(StreamInput in) throws IOException { ); static { - PARSER.declareField(constructorArg(), (p, c) -> 
Instant.parse(p.text()), CREATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorUtils.parseInstant(p, CREATED_AT_FIELD.getPreferredName()), + CREATED_AT_FIELD, + ObjectParser.ValueType.STRING + ); PARSER.declareString(constructorArg(), FIELD_FIELD); PARSER.declareString(constructorArg(), ID_FIELD); PARSER.declareInt(constructorArg(), ORDER_FIELD); @@ -124,7 +130,12 @@ public FilteringRule(StreamInput in) throws IOException { RULE_FIELD, ObjectParser.ValueType.STRING ); - PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), UPDATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorUtils.parseInstant(p, UPDATED_AT_FIELD.getPreferredName()), + UPDATED_AT_FIELD, + ObjectParser.ValueType.STRING + ); PARSER.declareString(constructorArg(), VALUE_FIELD); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java index dc96006f40349..fb4e25131449d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java @@ -57,6 +57,18 @@ public FilteringRules(StreamInput in) throws IOException { this.filteringValidationInfo = new FilteringValidationInfo(in); } + public FilteringAdvancedSnippet getAdvancedSnippet() { + return advancedSnippet; + } + + public List getRules() { + return rules; + } + + public FilteringValidationInfo getFilteringValidationInfo() { + return filteringValidationInfo; + } + private static final ParseField ADVANCED_SNIPPET_FIELD = new ParseField("advanced_snippet"); private static final ParseField RULES_FIELD = new ParseField("rules"); private static final 
ParseField VALIDATION_FIELD = new ParseField("validation"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsFeature.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsFeature.java new file mode 100644 index 0000000000000..7fd109db40470 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsFeature.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets; + +import org.elasticsearch.common.util.FeatureFlag; + +/** + * Connector Secrets feature flag. When the feature is complete, this flag will be removed. + */ +public class ConnectorSecretsFeature { + + private static final FeatureFlag SECRETS_FEATURE_FLAG = new FeatureFlag("connector_secrets"); + + /** + * Enables the Connectors Secrets feature by default for the tech preview phase. + * As documented, the Connectors Secrets is currently a tech preview feature, + * and customers should be aware that no SLAs or support are guaranteed during + * its pre-General Availability (GA) stage. + * + * Instead of removing the feature flag from the code, we enable it by default. + * This approach allows for the complete deactivation of the feature during the QA phase, + * should any critical bugs be discovered, with a single, trackable code change. 
+ */ + public static boolean isEnabled() { + return true; + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java new file mode 100644 index 0000000000000..c994fc1155277 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; +import org.elasticsearch.xpack.core.template.TemplateUtils; + +import java.util.Map; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static 
org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; + +/** + * A service that manages persistent Connector Secrets. + */ +public class ConnectorSecretsIndexService { + + private final Client clientWithOrigin; + + public static final String CONNECTOR_SECRETS_INDEX_NAME = ".connector-secrets"; + private static final int CURRENT_INDEX_VERSION = 1; + private static final String MAPPING_VERSION_VARIABLE = "connector-secrets.version"; + private static final String MAPPING_MANAGED_VERSION_VARIABLE = "connector-secrets.managed.index.version"; + + public ConnectorSecretsIndexService(Client client) { + this.clientWithOrigin = new OriginSettingClient(client, CONNECTORS_ORIGIN); + } + + /** + * Returns the {@link SystemIndexDescriptor} for the Connector Secrets system index. + * + * @return The {@link SystemIndexDescriptor} for the Connector Secrets system index. + */ + public static SystemIndexDescriptor getSystemIndexDescriptor() { + PutIndexTemplateRequest request = new PutIndexTemplateRequest(); + + String templateSource = TemplateUtils.loadTemplate( + "/connector-secrets.json", + Version.CURRENT.toString(), + MAPPING_VERSION_VARIABLE, + Map.of(MAPPING_MANAGED_VERSION_VARIABLE, Integer.toString(CURRENT_INDEX_VERSION)) + ); + request.source(templateSource, XContentType.JSON); + + return SystemIndexDescriptor.builder() + .setIndexPattern(CONNECTOR_SECRETS_INDEX_NAME + "*") + .setPrimaryIndex(CONNECTOR_SECRETS_INDEX_NAME + "-" + CURRENT_INDEX_VERSION) + .setDescription("Secret values managed by Connectors") + .setMappings(request.mappings()) + .setSettings(request.settings()) + .setAliasName(CONNECTOR_SECRETS_INDEX_NAME) + .setVersionMetaKey("version") + .setOrigin(CONNECTORS_ORIGIN) + .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) + .build(); + } + + public void getSecret(String id, ActionListener listener) { + clientWithOrigin.prepareGet(CONNECTOR_SECRETS_INDEX_NAME, id).execute(listener.delegateFailureAndWrap((delegate, getResponse) -> { + if 
(getResponse.isSourceEmpty()) { + delegate.onFailure(new ResourceNotFoundException("No secret with id [" + id + "]")); + return; + } + delegate.onResponse(new GetConnectorSecretResponse(getResponse.getId(), getResponse.getSource().get("value").toString())); + })); + } + + public void createSecret(PostConnectorSecretRequest request, ActionListener listener) { + try { + clientWithOrigin.prepareIndex(CONNECTOR_SECRETS_INDEX_NAME) + .setSource(request.toXContent(jsonBuilder())) + .execute( + listener.delegateFailureAndWrap( + (l, indexResponse) -> l.onResponse(new PostConnectorSecretResponse(indexResponse.getId())) + ) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + + public void deleteSecret(String id, ActionListener listener) { + try { + clientWithOrigin.prepareDelete(CONNECTOR_SECRETS_INDEX_NAME, id) + .execute(listener.delegateFailureAndWrap((delegate, deleteResponse) -> { + if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + delegate.onFailure(new ResourceNotFoundException("No secret with id [" + id + "]")); + return; + } + delegate.onResponse(new DeleteConnectorSecretResponse(deleteResponse.getResult() == DocWriteResponse.Result.DELETED)); + })); + } catch (Exception e) { + listener.onFailure(e); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretAction.java new file mode 100644 index 0000000000000..b97911a350972 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretAction.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionType; + +public class DeleteConnectorSecretAction { + + public static final String NAME = "cluster:admin/xpack/connector/secret/delete"; + + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private DeleteConnectorSecretAction() {/* no instances */} +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequest.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequest.java new file mode 100644 index 0000000000000..183362f64ea8f --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequest.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class DeleteConnectorSecretRequest extends ActionRequest { + + private final String id; + + public DeleteConnectorSecretRequest(String id) { + this.id = Objects.requireNonNull(id); + } + + public DeleteConnectorSecretRequest(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + } + + public String id() { + return id; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(id)) { + validationException = addValidationError("id missing", validationException); + } + + return validationException; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteConnectorSecretRequest that = (DeleteConnectorSecretRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponse.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponse.java new file mode 100644 index 0000000000000..7568d3f193779 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponse.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteConnectorSecretResponse extends ActionResponse implements ToXContentObject { + + private final boolean deleted; + + public DeleteConnectorSecretResponse(boolean deleted) { + this.deleted = deleted; + } + + public DeleteConnectorSecretResponse(StreamInput in) throws IOException { + super(in); + this.deleted = in.readBoolean(); + } + + public boolean isDeleted() { + return deleted; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(deleted); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("deleted", deleted); + return builder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteConnectorSecretResponse that = (DeleteConnectorSecretResponse) o; + return deleted == that.deleted; + } + + @Override + public int hashCode() { + return Objects.hash(deleted); + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretAction.java new file mode 100644 index 0000000000000..b116f1477b68a --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretAction.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionType; + +public class GetConnectorSecretAction { + + public static final String NAME = "cluster:admin/xpack/connector/secret/get"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private GetConnectorSecretAction() {/* no instances */} +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretRequest.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretRequest.java new file mode 100644 index 0000000000000..cf1cc0f563eba --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretRequest.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class GetConnectorSecretRequest extends ActionRequest { + + private final String id; + + public GetConnectorSecretRequest(String id) { + this.id = Objects.requireNonNull(id); + } + + public GetConnectorSecretRequest(StreamInput in) throws IOException { + super(in); + id = in.readString(); + } + + public String id() { + return id; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(id)) { + validationException = addValidationError("id missing", validationException); + } + + return validationException; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetConnectorSecretRequest that = (GetConnectorSecretRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretResponse.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretResponse.java new file mode 100644 index 0000000000000..3bbcb8212d51c --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretResponse.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class GetConnectorSecretResponse extends ActionResponse implements ToXContentObject { + + private final String id; + private final String value; + + public GetConnectorSecretResponse(StreamInput in) throws IOException { + super(in); + id = in.readString(); + value = in.readString(); + } + + public GetConnectorSecretResponse(String id, String value) { + this.id = id; + this.value = value; + } + + public String id() { + return id; + } + + public String value() { + return value; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeString(value); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field("id", id); + builder.field("value", value); + return builder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetConnectorSecretResponse that = (GetConnectorSecretResponse) o; + return Objects.equals(id, that.id) && Objects.equals(value, 
that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretAction.java new file mode 100644 index 0000000000000..2069655b876fa --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretAction.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionType; + +public class PostConnectorSecretAction { + public static final String NAME = "cluster:admin/xpack/connector/secret/post"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private PostConnectorSecretAction() {/* no instances */} +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java new file mode 100644 index 0000000000000..2e565dece7eca --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class PostConnectorSecretRequest extends ActionRequest { + + public static final ParseField VALUE_FIELD = new ParseField("value"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "post_secret_request", + args -> { + return new PostConnectorSecretRequest((String) args[0]); + } + ); + + static { + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> p.text(), + VALUE_FIELD, + ObjectParser.ValueType.STRING + ); + } + + public static PostConnectorSecretRequest fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + private final String value; + + public PostConnectorSecretRequest(String value) { + this.value = value; + } + + public PostConnectorSecretRequest(StreamInput in) throws IOException { + super(in); + this.value = in.readString(); + } + + public String value() { + return value; + } + + public XContentBuilder toXContent(XContentBuilder builder) throws IOException { + builder.startObject(); + builder.field(VALUE_FIELD.getPreferredName(), this.value); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(value); + } + + @Override + public ActionRequestValidationException 
validate() { + if (Strings.isNullOrEmpty(this.value)) { + ActionRequestValidationException exception = new ActionRequestValidationException(); + exception.addValidationError("value is missing"); + return exception; + } + + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PostConnectorSecretRequest that = (PostConnectorSecretRequest) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretResponse.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretResponse.java new file mode 100644 index 0000000000000..068b510c5fad5 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretResponse.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class PostConnectorSecretResponse extends ActionResponse implements ToXContentObject { + + private final String id; + + public PostConnectorSecretResponse(String id) { + this.id = id; + } + + public PostConnectorSecretResponse(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + } + + public String id() { + return id; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field("id", id); + return builder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PostConnectorSecretResponse that = (PostConnectorSecretResponse) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestDeleteConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestDeleteConnectorSecretAction.java new file mode 100644 index 0000000000000..cd1c9b5f19498 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestDeleteConnectorSecretAction.java @@ -0,0 +1,42 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +@ServerlessScope(Scope.INTERNAL) +public class RestDeleteConnectorSecretAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_delete_secret"; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.DELETE, "/_connector/_secret/{id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + final String id = request.param("id"); + return restChannel -> client.execute( + DeleteConnectorSecretAction.INSTANCE, + new DeleteConnectorSecretRequest(id), + new RestToXContentListener<>(restChannel) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestGetConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestGetConnectorSecretAction.java new file mode 100644 index 0000000000000..6ab5c1055c3a4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestGetConnectorSecretAction.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +@ServerlessScope(Scope.INTERNAL) +public class RestGetConnectorSecretAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_get_secret"; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, "/_connector/_secret/{id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + final String id = request.param("id"); + return restChannel -> client.execute( + GetConnectorSecretAction.INSTANCE, + new GetConnectorSecretRequest(id), + new RestToXContentListener<>(restChannel) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestPostConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestPostConnectorSecretAction.java new file mode 100644 index 0000000000000..eeacde1bdb3c5 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestPostConnectorSecretAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; + +@ServerlessScope(Scope.INTERNAL) +public class RestPostConnectorSecretAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_post_secret"; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.POST, "/_connector/_secret")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + try (XContentParser parser = request.contentParser()) { + PostConnectorSecretRequest postSecretRequest = PostConnectorSecretRequest.fromXContent(parser); + return restChannel -> client.execute( + PostConnectorSecretAction.INSTANCE, + postSecretRequest, + new RestToXContentListener<>(restChannel) + ); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretAction.java new file mode 100644 index 0000000000000..7c87598440cfd --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsIndexService; + +public class TransportDeleteConnectorSecretAction extends HandledTransportAction< + DeleteConnectorSecretRequest, + DeleteConnectorSecretResponse> { + + private final ConnectorSecretsIndexService connectorSecretsIndexService; + + @Inject + public TransportDeleteConnectorSecretAction(TransportService transportService, ActionFilters actionFilters, Client client) { + super( + DeleteConnectorSecretAction.NAME, + transportService, + actionFilters, + DeleteConnectorSecretRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSecretsIndexService = new ConnectorSecretsIndexService(client); + } + + protected void doExecute(Task task, DeleteConnectorSecretRequest request, ActionListener listener) { + connectorSecretsIndexService.deleteSecret(request.id(), listener); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportGetConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportGetConnectorSecretAction.java new file mode 100644 index 0000000000000..aaa03fa13298f --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportGetConnectorSecretAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsIndexService; + +public class TransportGetConnectorSecretAction extends HandledTransportAction { + + private final ConnectorSecretsIndexService connectorSecretsIndexService; + + @Inject + public TransportGetConnectorSecretAction(TransportService transportService, ActionFilters actionFilters, Client client) { + super( + GetConnectorSecretAction.NAME, + transportService, + actionFilters, + GetConnectorSecretRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSecretsIndexService = new ConnectorSecretsIndexService(client); + } + + protected void doExecute(Task task, GetConnectorSecretRequest request, ActionListener listener) { + connectorSecretsIndexService.getSecret(request.id(), listener); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPostConnectorSecretAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPostConnectorSecretAction.java new file mode 100644 index 0000000000000..7cc3195ccbbf2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPostConnectorSecretAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsIndexService; + +public class TransportPostConnectorSecretAction extends HandledTransportAction { + + private final ConnectorSecretsIndexService connectorSecretsIndexService; + + @Inject + public TransportPostConnectorSecretAction(TransportService transportService, ActionFilters actionFilters, Client client) { + super( + PostConnectorSecretAction.NAME, + transportService, + actionFilters, + PostConnectorSecretRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSecretsIndexService = new ConnectorSecretsIndexService(client); + } + + protected void doExecute(Task task, PostConnectorSecretRequest request, ActionListener listener) { + connectorSecretsIndexService.createSecret(request, listener); + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 84d91b7fe0f08..48f3f2a117d63 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -25,15 +25,15 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; -import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.ConnectorUtils; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; import java.io.IOException; import java.time.Instant; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Objects; @@ -265,19 +265,19 @@ public ConnectorSyncJob(StreamInput in) throws IOException { static { PARSER.declareField( optionalConstructorArg(), - (p, c) -> parseNullableInstant(p), + (p, c) -> ConnectorUtils.parseNullableInstant(p, CANCELATION_REQUESTED_AT_FIELD.getPreferredName()), CANCELATION_REQUESTED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> parseNullableInstant(p), + (p, c) -> ConnectorUtils.parseNullableInstant(p, CANCELED_AT_FIELD.getPreferredName()), CANCELED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> parseNullableInstant(p), + (p, c) -> 
ConnectorUtils.parseNullableInstant(p, COMPLETED_AT_FIELD.getPreferredName()), COMPLETED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -287,7 +287,12 @@ public ConnectorSyncJob(StreamInput in) throws IOException { CONNECTOR_FIELD, ObjectParser.ValueType.OBJECT ); - PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), CREATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorUtils.parseInstant(p, CREATED_AT_FIELD.getPreferredName()), + CREATED_AT_FIELD, + ObjectParser.ValueType.STRING + ); PARSER.declareLong(constructorArg(), DELETED_DOCUMENT_COUNT_FIELD); PARSER.declareStringOrNull(optionalConstructorArg(), ERROR_FIELD); PARSER.declareString(constructorArg(), ID_FIELD); @@ -299,11 +304,16 @@ public ConnectorSyncJob(StreamInput in) throws IOException { JOB_TYPE_FIELD, ObjectParser.ValueType.STRING ); - PARSER.declareField(constructorArg(), (p, c) -> parseNullableInstant(p), LAST_SEEN_FIELD, ObjectParser.ValueType.STRING_OR_NULL); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorUtils.parseNullableInstant(p, LAST_SEEN_FIELD.getPreferredName()), + LAST_SEEN_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); PARSER.declareField(constructorArg(), (p, c) -> p.map(), METADATA_FIELD, ObjectParser.ValueType.OBJECT); PARSER.declareField( optionalConstructorArg(), - (p, c) -> parseNullableInstant(p), + (p, c) -> ConnectorUtils.parseNullableInstant(p, STARTED_AT_FIELD.getPreferredName()), STARTED_AT_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); @@ -323,10 +333,6 @@ public ConnectorSyncJob(StreamInput in) throws IOException { PARSER.declareStringOrNull(optionalConstructorArg(), WORKER_HOSTNAME_FIELD); } - private static Instant parseNullableInstant(XContentParser p) throws IOException { - return p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()); - } - @SuppressWarnings("unchecked") private static final ConstructingObjectParser SYNC_JOB_CONNECTOR_PARSER = new ConstructingObjectParser<>( "sync_job_connector", @@ -343,7 +349,7 @@ private static Instant parseNullableInstant(XContentParser p) throws IOException String syncJobConnectorId = Strings.isNullOrEmpty(connectorId) ? parsedConnectorId : connectorId; return new Connector.Builder().setConnectorId(syncJobConnectorId) - .setFiltering((List) args[i++]) + .setSyncJobFiltering((FilteringRules) args[i++]) .setIndexName((String) args[i++]) .setLanguage((String) args[i++]) .setPipeline((ConnectorIngestPipeline) args[i++]) @@ -355,9 +361,10 @@ private static Instant parseNullableInstant(XContentParser p) throws IOException static { SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.ID_FIELD); - SYNC_JOB_CONNECTOR_PARSER.declareObjectArray( + SYNC_JOB_CONNECTOR_PARSER.declareObjectOrNull( optionalConstructorArg(), - (p, c) -> ConnectorFiltering.fromXContent(p), + (p, c) -> FilteringRules.fromXContent(p), + null, Connector.FILTERING_FIELD ); SYNC_JOB_CONNECTOR_PARSER.declareStringOrNull(optionalConstructorArg(), Connector.INDEX_NAME_FIELD); @@ -491,8 +498,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (connector.getConnectorId() != null) { builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId()); } - if (connector.getFiltering() != null) { - builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getFiltering()); + if (connector.getSyncJobFiltering() != null) { + builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getSyncJobFiltering()); } if (connector.getIndexName() != null) { builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index ee35d8fb6372c..01a297a11103b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -38,9 +38,11 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; @@ -52,6 +54,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.function.BiConsumer; import java.util.stream.Stream; @@ -427,12 +430,23 @@ public void onResponse(GetResponse response) { return; } try { - final Connector syncJobConnectorInfo = ConnectorSyncJob.syncJobConnectorFromXContentBytes( + final Connector connector = Connector.fromXContentBytes( response.getSourceAsBytesRef(), connectorId, XContentType.JSON ); - listener.onResponse(syncJobConnectorInfo); + + // Build the connector representation for sync job + final Connector syncJobConnector = new Connector.Builder().setConnectorId(connector.getConnectorId()) + 
.setSyncJobFiltering(transformConnectorFilteringToSyncJobRepresentation(connector.getFiltering())) + .setIndexName(connector.getIndexName()) + .setLanguage(connector.getLanguage()) + .setPipeline(connector.getPipeline()) + .setServiceType(connector.getServiceType()) + .setConfiguration(connector.getConfiguration()) + .build(); + + listener.onResponse(syncJobConnector); } catch (Exception e) { listener.onFailure(e); } @@ -448,6 +462,20 @@ public void onFailure(Exception e) { } } + /** + * Transforms the first {@link ConnectorFiltering} object from a list into a {@link FilteringRules} representation for a sync job. + * This method specifically extracts the 'active' filtering rules from the first {@link ConnectorFiltering} object in the list, + * if the list is neither null nor empty. + * + * @param connectorFiltering The list of {@link ConnectorFiltering} objects to be transformed. Can be null or empty. + */ + FilteringRules transformConnectorFilteringToSyncJobRepresentation(List connectorFiltering) { + return Optional.ofNullable(connectorFiltering) + .filter(list -> list.isEmpty() == false) + .map(list -> list.get(0).getActive()) + .orElse(null); + } + /** * Sets the error for the {@link ConnectorSyncJob} in the underlying index. * This also sets the {@link ConnectorSyncStatus} to 'ERROR'. diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachine.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachine.java new file mode 100644 index 0000000000000..542181ca44d2f --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachine.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.Map; +import java.util.Set; + +/** + * The {@link ConnectorSyncJobStateMachine} class manages state transitions for sync jobs + * in accordance with the Connector Protocol. + * It defines valid transitions between different connector sync job states and provides a method to validate these transitions. + */ +public class ConnectorSyncJobStateMachine { + + private static final Map> VALID_TRANSITIONS = Map.of( + ConnectorSyncStatus.PENDING, + EnumSet.of(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.CANCELED), + ConnectorSyncStatus.IN_PROGRESS, + EnumSet.of(ConnectorSyncStatus.CANCELING, ConnectorSyncStatus.COMPLETED, ConnectorSyncStatus.SUSPENDED, ConnectorSyncStatus.ERROR), + ConnectorSyncStatus.COMPLETED, + Collections.emptySet(), + ConnectorSyncStatus.SUSPENDED, + EnumSet.of(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.CANCELED), + ConnectorSyncStatus.CANCELING, + EnumSet.of(ConnectorSyncStatus.CANCELED, ConnectorSyncStatus.ERROR), + ConnectorSyncStatus.CANCELED, + Collections.emptySet(), + ConnectorSyncStatus.ERROR, + Collections.emptySet() + ); + + /** + * Checks if a transition from one connector sync job state to another is valid. + * + * @param current The current state of the connector sync job. + * @param next The proposed next state of the connector sync job. 
+ */ + public static boolean isValidTransition(ConnectorSyncStatus current, ConnectorSyncStatus next) { + return VALID_TRANSITIONS.getOrDefault(current, Collections.emptySet()).contains(next); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java index 111828680455c..2cf39a35bba43 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java @@ -27,14 +27,12 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; -public class CancelConnectorSyncJobAction extends ActionType { +public class CancelConnectorSyncJobAction { - public static final CancelConnectorSyncJobAction INSTANCE = new CancelConnectorSyncJobAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/cancel"; + public static final ActionType INSTANCE = new ActionType(NAME); - private CancelConnectorSyncJobAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private CancelConnectorSyncJobAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java index 54ba26ec1533a..520b2596932b3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java @@ -27,14 +27,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class CheckInConnectorSyncJobAction extends ActionType { +public class CheckInConnectorSyncJobAction { - public static final CheckInConnectorSyncJobAction INSTANCE = new CheckInConnectorSyncJobAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/check_in"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private CheckInConnectorSyncJobAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private CheckInConnectorSyncJobAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java index 05cd6cce90fdd..68467612e92d8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java @@ -27,14 +27,12 @@ import static 
org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class DeleteConnectorSyncJobAction extends ActionType { +public class DeleteConnectorSyncJobAction { - public static final DeleteConnectorSyncJobAction INSTANCE = new DeleteConnectorSyncJobAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/delete"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private DeleteConnectorSyncJobAction() { - super(NAME, AcknowledgedResponse::readFrom); - } + private DeleteConnectorSyncJobAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java index 70be6a5a6ffa1..9e21ba7e94f1f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java @@ -28,14 +28,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class GetConnectorSyncJobAction extends ActionType { +public class GetConnectorSyncJobAction { - public static final GetConnectorSyncJobAction INSTANCE = new GetConnectorSyncJobAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/get"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private GetConnectorSyncJobAction() { - 
super(NAME, GetConnectorSyncJobAction.Response::new); - } + private GetConnectorSyncJobAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final String connectorSyncJobId; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java index 0a22b6f938142..298eee466bfb2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java @@ -30,14 +30,12 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class ListConnectorSyncJobsAction extends ActionType { +public class ListConnectorSyncJobsAction { - public static final ListConnectorSyncJobsAction INSTANCE = new ListConnectorSyncJobsAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/list"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public ListConnectorSyncJobsAction() { - super(NAME, ListConnectorSyncJobsAction.Response::new); - } + private ListConnectorSyncJobsAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { public static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java index 
ffab39ef86261..a395aacaa043e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java @@ -35,15 +35,12 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class PostConnectorSyncJobAction extends ActionType { - - public static final PostConnectorSyncJobAction INSTANCE = new PostConnectorSyncJobAction(); +public class PostConnectorSyncJobAction { public static final String NAME = "cluster:admin/xpack/connector/sync_job/post"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private PostConnectorSyncJobAction() { - super(NAME, PostConnectorSyncJobAction.Response::new); - } + private PostConnectorSyncJobAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { public static final String EMPTY_CONNECTOR_ID_ERROR_MESSAGE = "[id] of the connector cannot be null or empty"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java index fe0893c82e27d..40b016ea2af91 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java @@ -32,15 +32,14 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class UpdateConnectorSyncJobErrorAction extends ActionType { +public class UpdateConnectorSyncJobErrorAction { - public static final UpdateConnectorSyncJobErrorAction INSTANCE = new UpdateConnectorSyncJobErrorAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_error"; - public static final String ERROR_EMPTY_MESSAGE = "[error] of the connector sync job cannot be null or empty"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private UpdateConnectorSyncJobErrorAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorSyncJobErrorAction() {/* no instances */} + + public static final String ERROR_EMPTY_MESSAGE = "[error] of the connector sync job cannot be null or empty"; public static class Request extends ActionRequest implements ToXContentObject { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java index b9c57cb6a0c61..160f4f4e984b0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java @@ -24,6 +24,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorUtils; import 
org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; @@ -36,14 +38,12 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; -public class UpdateConnectorSyncJobIngestionStatsAction extends ActionType { +public class UpdateConnectorSyncJobIngestionStatsAction { - public static final UpdateConnectorSyncJobIngestionStatsAction INSTANCE = new UpdateConnectorSyncJobIngestionStatsAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_stats"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public UpdateConnectorSyncJobIngestionStatsAction() { - super(NAME, ConnectorUpdateActionResponse::new); - } + private UpdateConnectorSyncJobIngestionStatsAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); @@ -166,7 +166,7 @@ public ActionRequestValidationException validate() { PARSER.declareLong(optionalConstructorArg(), ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> ConnectorUtils.parseInstant(p, Connector.LAST_SEEN_FIELD.getPreferredName()), ConnectorSyncJob.LAST_SEEN_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java index ef42a7d7c64f2..5b07d81d90df0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java @@ -37,7 +37,7 @@ public class QueryRuleCriteria implements Writeable, ToXContentObject { - public static final TransportVersion CRITERIA_METADATA_VALUES_TRANSPORT_VERSION = TransportVersions.V_8_500_061; + public static final TransportVersion CRITERIA_METADATA_VALUES_TRANSPORT_VERSION = TransportVersions.V_8_10_X; private final QueryRuleCriteriaType criteriaType; private final String criteriaMetadata; private final List criteriaValues; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java index fcd0f6be8fbcb..f3bc07387512f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java @@ -27,7 +27,7 @@ */ public class QueryRulesetListItem implements Writeable, ToXContentObject { - public static final TransportVersion EXPANDED_RULESET_COUNT_TRANSPORT_VERSION = TransportVersions.V_8_500_061; + public static final TransportVersion EXPANDED_RULESET_COUNT_TRANSPORT_VERSION = TransportVersions.V_8_10_X; public static final ParseField RULESET_ID_FIELD = new ParseField("ruleset_id"); public static final ParseField RULE_TOTAL_COUNT_FIELD = new ParseField("rule_total_count"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java index b23ed92a5d9b8..3882b6c61bb2c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java @@ -73,7 +73,7 @@ public class RuleQueryBuilder extends AbstractQueryBuilder { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_061; + return TransportVersions.V_8_10_X; } public RuleQueryBuilder(QueryBuilder organicQuery, Map matchCriteria, String rulesetId) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/DeleteQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/DeleteQueryRulesetAction.java index 15bee07bc4135..1b4ae187b2631 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/DeleteQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/DeleteQueryRulesetAction.java @@ -26,14 +26,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class DeleteQueryRulesetAction extends ActionType { +public class DeleteQueryRulesetAction { - public static final DeleteQueryRulesetAction INSTANCE = new DeleteQueryRulesetAction(); public static final String NAME = "cluster:admin/xpack/query_rules/delete"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private DeleteQueryRulesetAction() { - super(NAME, AcknowledgedResponse::readFrom); - } + private DeleteQueryRulesetAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final String rulesetId; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java index 
191bf4a806f0f..814d9a045eb29 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java @@ -29,14 +29,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class GetQueryRulesetAction extends ActionType { +public class GetQueryRulesetAction { - public static final GetQueryRulesetAction INSTANCE = new GetQueryRulesetAction(); public static final String NAME = "cluster:admin/xpack/query_rules/get"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private GetQueryRulesetAction() { - super(NAME, GetQueryRulesetAction.Response::new); - } + private GetQueryRulesetAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final String rulesetId; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java index f352fe4d73479..11397583ce5b9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java @@ -28,14 +28,12 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class ListQueryRulesetsAction extends ActionType { +public class ListQueryRulesetsAction { - public static final ListQueryRulesetsAction INSTANCE = new ListQueryRulesetsAction(); public static final String NAME = "cluster:admin/xpack/query_rules/list"; + public static final ActionType INSTANCE = new 
ActionType<>(NAME); - public ListQueryRulesetsAction() { - super(NAME, ListQueryRulesetsAction.Response::new); - } + private ListQueryRulesetsAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final PageParams pageParams; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java index d29df284f8660..1a42d4c631a9b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java @@ -33,14 +33,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class PutQueryRulesetAction extends ActionType { +public class PutQueryRulesetAction { - public static final PutQueryRulesetAction INSTANCE = new PutQueryRulesetAction(); public static final String NAME = "cluster:admin/xpack/query_rules/put"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public PutQueryRulesetAction() { - super(NAME, PutQueryRulesetAction.Response::new); - } + private PutQueryRulesetAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java index bd1fd33f4e963..f2f8954e1af0e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java @@ -55,7 +55,7 @@ public class SearchApplication implements Writeable, ToXContentObject { + "We recommend storing a template to avoid breaking changes."; public static final String NO_ALIAS_WARNING = "Alias is missing for the search application"; - private static final TransportVersion INDICES_REMOVED_TRANSPORT_VERSION = TransportVersions.SEARCH_APP_INDICES_REMOVED; + private static final TransportVersion INDICES_REMOVED_TRANSPORT_VERSION = TransportVersions.V_8_11_X; private final String name; @Nullable diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java index 41c70d83c01e1..93ad2f46b5583 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java @@ -25,14 +25,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class DeleteSearchApplicationAction extends ActionType { +public class DeleteSearchApplicationAction { - public static final DeleteSearchApplicationAction INSTANCE = new DeleteSearchApplicationAction(); public static final String NAME = "cluster:admin/xpack/application/search_application/delete"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private DeleteSearchApplicationAction() { - super(NAME, AcknowledgedResponse::readFrom); - } + private DeleteSearchApplicationAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final String name; 
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java index 46912615a98fd..83c04106edd4c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java @@ -27,14 +27,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class GetSearchApplicationAction extends ActionType { +public class GetSearchApplicationAction { - public static final GetSearchApplicationAction INSTANCE = new GetSearchApplicationAction(); public static final String NAME = "cluster:admin/xpack/application/search_application/get"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - private GetSearchApplicationAction() { - super(NAME, GetSearchApplicationAction.Response::new); - } + private GetSearchApplicationAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final String name; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java index f44fa2a7b67e8..5f82f628814cf 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java @@ -32,14 +32,12 @@ import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class ListSearchApplicationAction extends ActionType { +public class ListSearchApplicationAction { - public static final ListSearchApplicationAction INSTANCE = new ListSearchApplicationAction(); public static final String NAME = "cluster:admin/xpack/application/search_application/list"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public ListSearchApplicationAction() { - super(NAME, ListSearchApplicationAction.Response::new); - } + private ListSearchApplicationAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java index 78ada27534892..75bec0d61649f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java @@ -30,14 +30,12 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class PutSearchApplicationAction extends ActionType { +public class PutSearchApplicationAction { - public static final PutSearchApplicationAction INSTANCE = new PutSearchApplicationAction(); public static final String NAME = "cluster:admin/xpack/application/search_application/put"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public PutSearchApplicationAction() { - super(NAME, PutSearchApplicationAction.Response::new); - } + private PutSearchApplicationAction() {/* 
no instances */} public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/QuerySearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/QuerySearchApplicationAction.java index de4c1a13082e2..2a9969239a27f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/QuerySearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/QuerySearchApplicationAction.java @@ -10,13 +10,10 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchResponse; -public class QuerySearchApplicationAction extends ActionType { +public class QuerySearchApplicationAction { - public static final QuerySearchApplicationAction INSTANCE = new QuerySearchApplicationAction(); public static final String NAME = "indices:data/read/xpack/application/search_application/search"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public QuerySearchApplicationAction() { - super(NAME, SearchResponse::new); - } - + private QuerySearchApplicationAction() {/* no instances */} } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RenderSearchApplicationQueryAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RenderSearchApplicationQueryAction.java index b70152c1c3c23..dd8984b7df11a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RenderSearchApplicationQueryAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RenderSearchApplicationQueryAction.java @@ -19,14 +19,12 @@ import java.io.IOException; import java.util.Objects; -public class 
RenderSearchApplicationQueryAction extends ActionType { +public class RenderSearchApplicationQueryAction { - public static final RenderSearchApplicationQueryAction INSTANCE = new RenderSearchApplicationQueryAction(); public static final String NAME = "cluster:admin/xpack/application/search_application/render_query"; + public static final ActionType INSTANCE = new ActionType<>(NAME); - public RenderSearchApplicationQueryAction() { - super(NAME, RenderSearchApplicationQueryAction.Response::new); - } + private RenderSearchApplicationQueryAction() {/* no instances */} public static class Response extends ActionResponse implements ToXContentObject, NamedWriteable { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java index 2e181fda1ef88..67c918dac94c9 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -70,6 +71,7 @@ protected XPackLicenseState getLicenseState() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -79,6 +81,7 @@ public List getRestHandlers( ) { return entSearchPlugin.getRestHandlers( settings, + namedWriteableRegistry, 
restController, clusterSettings, indexScopedSettings, diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java index 44d9c0fcf9e76..9b1f9c60d1607 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java @@ -85,6 +85,69 @@ public void testToXContent() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } + public void testToXContentWithMultipleConstraintTypes() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "default_value": null, + "depends_on": [ + { + "field": "some_field", + "value": true + } + ], + "display": "textbox", + "label": "Very important field", + "options": [], + "order": 4, + "required": true, + "sensitive": false, + "tooltip": "Wow, this tooltip is useful.", + "type": "str", + "ui_restrictions": [], + "validations": [ + { + "constraint": 32, + "type": "less_than" + }, + { + "constraint": "^\\\\\\\\d{4}-\\\\\\\\d{2}-\\\\\\\\d{2}$", + "type": "regex" + }, + { + "constraint": "int", + "type": "list_type" + }, + { + "constraint": [ + 1, + 2, + 3 + ], + "type": "included_in" + }, + { + "constraint": [ + "string_1", + "string_2", + "string_3" + ], + "type": "included_in" + } + ], + "value": "" + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = 
createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + private void assertTransportSerialization(ConnectorConfiguration testInstance) throws IOException { ConnectorConfiguration deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index eedfea13c671b..542ea948c12df 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -15,14 +15,17 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; +import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNativeAction; import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorServiceTypeAction; import org.junit.Before; import java.util.ArrayList; @@ -51,7 +54,7 @@ public void setup() { public void testPutConnector() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); Connector indexedConnector = awaitGetConnector(connectorId); @@ -60,7 +63,7 @@ public void testPutConnector() throws Exception { public void testPostConnector() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - PostConnectorAction.Response resp = awaitPostConnector(connector); + PostConnectorAction.Response resp = buildRequestAndAwaitPostConnector(connector); Connector indexedConnector = awaitGetConnector(resp.getId()); assertThat(resp.getId(), equalTo(indexedConnector.getConnectorId())); @@ -71,7 +74,7 @@ public void testDeleteConnector() throws Exception { List connectorIds = new ArrayList<>(); for (int i = 0; i < numConnectors; i++) { Connector connector = ConnectorTestUtils.getRandomConnector(); - PostConnectorAction.Response resp = awaitPostConnector(connector); + PostConnectorAction.Response resp = buildRequestAndAwaitPostConnector(connector); connectorIds.add(resp.getId()); } @@ -86,7 +89,7 @@ public void testDeleteConnector() throws Exception { public void testUpdateConnectorConfiguration() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + 
DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); Map connectorConfiguration = connector.getConfiguration() @@ -109,7 +112,7 @@ public void testUpdateConnectorConfiguration() throws Exception { public void testUpdateConnectorPipeline() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorIngestPipeline updatedPipeline = new ConnectorIngestPipeline.Builder().setName("test-pipeline") @@ -133,7 +136,7 @@ public void testUpdateConnectorFiltering() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); List filteringList = IntStream.range(0, 10) @@ -155,7 +158,7 @@ public void testUpdateConnectorLastSeen() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connectorId); @@ -179,7 +182,7 @@ public void testUpdateConnectorLastSyncStats() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - 
DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); @@ -198,7 +201,7 @@ public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorScheduling updatedScheduling = ConnectorTestUtils.getRandomConnectorScheduling(); @@ -215,10 +218,31 @@ public void testUpdateConnectorScheduling() throws Exception { assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling())); } + public void testUpdateConnectorServiceType() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + String newServiceType = randomAlphaOfLengthBetween(3, 10); + + UpdateConnectorServiceTypeAction.Request updateServiceTypeRequest = new UpdateConnectorServiceTypeAction.Request( + connectorId, + newServiceType + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorServiceType(updateServiceTypeRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(newServiceType, equalTo(indexedConnector.getServiceType())); + } + public void testUpdateConnectorError() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = 
randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request( @@ -236,7 +260,7 @@ public void testUpdateConnectorError() throws Exception { public void testUpdateConnectorNameOrDescription() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connectorId, connector); + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorNameAction.Request updateNameDescriptionRequest = new UpdateConnectorNameAction.Request( @@ -253,6 +277,24 @@ public void testUpdateConnectorNameOrDescription() throws Exception { assertThat(updateNameDescriptionRequest.getDescription(), equalTo(indexedConnector.getDescription())); } + public void testUpdateConnectorNative() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + boolean isNative = randomBoolean(); + + UpdateConnectorNativeAction.Request updateNativeRequest = new UpdateConnectorNativeAction.Request(connectorId, isNative); + + DocWriteResponse updateResponse = awaitUpdateConnectorNative(updateNativeRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(isNative, equalTo(indexedConnector.isNative())); + } + private DeleteResponse awaitDeleteConnector(String connectorId) throws 
Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -278,11 +320,24 @@ public void onFailure(Exception e) { return resp.get(); } - private DocWriteResponse awaitPutConnector(String docId, Connector connector) throws Exception { + private DocWriteResponse buildRequestAndAwaitPutConnector(String docId, Connector connector) throws Exception { + PutConnectorAction.Request putConnectorRequest = new PutConnectorAction.Request( + docId, + connector.getDescription(), + connector.getIndexName(), + connector.isNative(), + connector.getLanguage(), + connector.getName(), + connector.getServiceType() + ); + return awaitPutConnector(putConnectorRequest); + } + + private DocWriteResponse awaitPutConnector(PutConnectorAction.Request request) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.putConnector(docId, connector, new ActionListener<>() { + connectorIndexService.createConnectorWithDocId(request, new ActionListener<>() { @Override public void onResponse(DocWriteResponse indexResponse) { resp.set(indexResponse); @@ -303,11 +358,23 @@ public void onFailure(Exception e) { return resp.get(); } - private PostConnectorAction.Response awaitPostConnector(Connector connector) throws Exception { + private PostConnectorAction.Response buildRequestAndAwaitPostConnector(Connector connector) throws Exception { + PostConnectorAction.Request postConnectorRequest = new PostConnectorAction.Request( + connector.getDescription(), + connector.getIndexName(), + connector.isNative(), + connector.getLanguage(), + connector.getName(), + connector.getServiceType() + ); + return awaitPostConnector(postConnectorRequest); + } + + private PostConnectorAction.Response awaitPostConnector(PostConnectorAction.Request request) throws Exception { CountDownLatch latch = new CountDownLatch(1); 
final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.postConnector(connector, new ActionListener<>() { + connectorIndexService.createConnectorWithAutoGeneratedId(request, new ActionListener<>() { @Override public void onResponse(PostConnectorAction.Response indexResponse) { resp.set(indexResponse); @@ -435,7 +502,7 @@ private UpdateResponse awaitUpdateConnectorLastSeen(UpdateConnectorLastSeenActio CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.updateConnectorLastSeen(checkIn, new ActionListener<>() { + connectorIndexService.checkInConnector(checkIn.getConnectorId(), new ActionListener<>() { @Override public void onResponse(UpdateResponse indexResponse) { resp.set(indexResponse); @@ -482,6 +549,31 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorNative(UpdateConnectorNativeAction.Request updateIndexNameRequest) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorNative(updateIndexNameRequest, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update is_native request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update is_native request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorPipeline(UpdateConnectorPipelineAction.Request updatePipeline) throws 
Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -532,6 +624,32 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorServiceType(UpdateConnectorServiceTypeAction.Request updateServiceTypeRequest) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorServiceType(updateServiceTypeRequest, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update service type request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update service type request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorName(UpdateConnectorNameAction.Request updatedNameOrDescription) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorStateMachineTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorStateMachineTests.java new file mode 100644 index 0000000000000..6fdda83244db8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorStateMachineTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.test.ESTestCase; + +public class ConnectorStateMachineTests extends ESTestCase { + + public void testValidTransitionFromCreated() { + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.CREATED, ConnectorStatus.NEEDS_CONFIGURATION)); + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.CREATED, ConnectorStatus.ERROR)); + } + + public void testInvalidTransitionFromCreated() { + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.CREATED, ConnectorStatus.CONFIGURED)); + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.CREATED, ConnectorStatus.CONNECTED)); + } + + public void testValidTransitionFromNeedsConfiguration() { + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.NEEDS_CONFIGURATION, ConnectorStatus.CONFIGURED)); + } + + public void testInvalidTransitionFromNeedsConfiguration() { + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.NEEDS_CONFIGURATION, ConnectorStatus.CREATED)); + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.NEEDS_CONFIGURATION, ConnectorStatus.CONNECTED)); + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.NEEDS_CONFIGURATION, ConnectorStatus.ERROR)); + } + + public void testValidTransitionFromConfigured() { + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONFIGURED, ConnectorStatus.NEEDS_CONFIGURATION)); + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONFIGURED, ConnectorStatus.CONNECTED)); + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONFIGURED, ConnectorStatus.ERROR)); + } + + public void testInvalidTransitionFromConfigured() { + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONFIGURED, 
ConnectorStatus.CREATED)); + } + + public void testValidTransitionFromConnected() { + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONNECTED, ConnectorStatus.CONFIGURED)); + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONNECTED, ConnectorStatus.ERROR)); + } + + public void testInvalidTransitionFromConnected() { + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONNECTED, ConnectorStatus.CREATED)); + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.CONNECTED, ConnectorStatus.NEEDS_CONFIGURATION)); + } + + public void testValidTransitionFromError() { + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.ERROR, ConnectorStatus.CONNECTED)); + assertTrue(ConnectorStateMachine.isValidTransition(ConnectorStatus.ERROR, ConnectorStatus.CONFIGURED)); + } + + public void testInvalidTransitionFromError() { + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.ERROR, ConnectorStatus.CREATED)); + assertFalse(ConnectorStateMachine.isValidTransition(ConnectorStatus.ERROR, ConnectorStatus.NEEDS_CONFIGURATION)); + } + + public void testTransitionToSameState() { + for (ConnectorStatus state : ConnectorStatus.values()) { + assertFalse("Transition from " + state + " to itself should be invalid", ConnectorStateMachine.isValidTransition(state, state)); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 6a16e6f183383..74b84e914a942 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -87,15 +87,15 @@ public static ConnectorIngestPipeline getRandomConnectorIngestPipeline() { public static 
ConnectorSyncInfo getRandomConnectorSyncInfo() { return new ConnectorSyncInfo.Builder().setLastAccessControlSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) - .setLastAccessControlSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) + .setLastAccessControlSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setLastAccessControlSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) .setLastDeletedDocumentCount(randomLong()) - .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) + .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setLastIndexedDocumentCount(randomLong()) .setLastSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) - .setLastSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) + .setLastSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setLastSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) - .setLastSynced(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) + .setLastSynced(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .build(); } @@ -187,8 +187,10 @@ public static ConnectorFiltering getRandomConnectorFiltering() { } public static Connector getRandomSyncJobConnectorInfo() { + ConnectorFiltering randomFiltering = getRandomConnectorFiltering(); return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) - .setFiltering(List.of(getRandomConnectorFiltering())) + .setSyncJobFiltering(randomFiltering.getActive()) + .setFiltering(List.of(randomFiltering)) .setIndexName(randomAlphaOfLength(10)) .setLanguage(randomAlphaOfLength(10)) .setServiceType(randomAlphaOfLength(10)) @@ -249,7 +251,7 @@ public static Connector getRandomConnector() { 
.setIndexName(randomAlphaOfLength(10)) .setIsNative(randomBoolean()) .setLanguage(randomFrom(new String[] { null, randomAlphaOfLength(10) })) - .setLastSeen(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) + .setLastSeen(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setSyncInfo(getRandomConnectorSyncInfo()) .setName(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setPipeline(randomBoolean() ? getRandomConnectorIngestPipeline() : null) @@ -287,6 +289,21 @@ private static Cron getRandomCronExpression() { ); } + /** + * Generate a random Instant between: + * - 1 January 1970 00:00:00+00:00 + * - 24 January 2065 05:20:00+00:00 + */ + public static Instant randomInstant() { + Instant lowerBoundInstant = Instant.ofEpochSecond(0L); + Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); + + return Instant.ofEpochSecond( + randomLongBetween(lowerBoundInstant.getEpochSecond(), upperBoundInstant.getEpochSecond()), + randomLongBetween(0, 999999999) + ); + } + public static ConnectorSyncStatus getRandomSyncStatus() { ConnectorSyncStatus[] values = ConnectorSyncStatus.values(); return values[randomInt(values.length - 1)]; diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorUtilsTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorUtilsTests.java new file mode 100644 index 0000000000000..507d116fa6491 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorUtilsTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.time.Instant; + +public class ConnectorUtilsTests extends ESTestCase { + + public void testParseInstantConnectorFrameworkFormat() throws IOException { + XContentParser parser = createParser(JsonXContent.jsonXContent, "\"2023-01-16T10:00:00.123+00:00\""); + parser.nextToken(); + Instant instant = ConnectorUtils.parseInstant(parser, "my_time_field"); + assertNotNull(instant); + assertEquals(1673863200123L, instant.toEpochMilli()); + } + + public void testParseInstantStandardJavaFormat() throws IOException { + XContentParser parser = createParser(JsonXContent.jsonXContent, "\"2023-01-16T10:00:00.123000000Z\""); + parser.nextToken(); + Instant instant = ConnectorUtils.parseInstant(parser, "my_time_field"); + assertNotNull(instant); + assertEquals(1673863200123L, instant.toEpochMilli()); + } + + public void testParseInstantStandardJavaFormatWithNanosecondPrecision() throws IOException { + XContentParser parser = createParser(JsonXContent.jsonXContent, "\"2023-01-16T10:00:00.123456789Z\""); + parser.nextToken(); + Instant instant = ConnectorUtils.parseInstant(parser, "my_time_field"); + assertNotNull(instant); + assertEquals(123456789L, instant.getNano()); + assertEquals(1673863200L, instant.getEpochSecond()); + } + + public void testParseNullableInstant() throws IOException { + XContentParser parser = createParser(JsonXContent.jsonXContent, new BytesArray("null")); + parser.nextToken(); + Instant instant = ConnectorUtils.parseNullableInstant(parser, "my_time_field"); + assertNull(instant); + } + + public void testParseNullableInstantWithValue() throws IOException { + XContentParser parser = createParser(JsonXContent.jsonXContent, "\"2023-01-16T10:00:00.123+00:00\""); 
+ parser.nextToken(); + Instant instant = ConnectorUtils.parseNullableInstant(parser, "my_time_field"); + assertNotNull(instant); + assertEquals(1673863200123L, instant.toEpochMilli()); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java new file mode 100644 index 0000000000000..0f0e83f2b9c51 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PostConnectorActionTests extends ESTestCase { + + public void testValidate_WhenConnectorIdAndIndexNamePresent_ExpectNoValidationError() { + PostConnectorAction.Request request = new PostConnectorAction.Request( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenMalformedIndexName_ExpectValidationError() { + PostConnectorAction.Request requestWithMissingConnectorId = new PostConnectorAction.Request( + randomAlphaOfLength(10), + "_illegal-index-name", + randomBoolean(), 
+ randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("Invalid index name [_illegal-index-name]")); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java new file mode 100644 index 0000000000000..a35c5c7e408f3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PutConnectorActionTests extends ESTestCase { + + public void testValidate_WhenConnectorIdAndIndexNamePresent_ExpectNoValidationError() { + PutConnectorAction.Request request = new PutConnectorAction.Request( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorIdIsNull_ExpectValidationError() { + PutConnectorAction.Request requestWithMissingConnectorId = new PutConnectorAction.Request( + null, + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("[connector_id] cannot be [null] or [\"\"]")); + } + + public void testValidate_WhenMalformedIndexName_ExpectValidationError() { + PutConnectorAction.Request requestWithMissingConnectorId = new PutConnectorAction.Request( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + "_illegal-index-name", + randomBoolean(), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("Invalid index name 
[_illegal-index-name]")); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNativeActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNativeActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..a680108e50055 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNativeActionRequestBWCSerializingTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorNativeActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorNativeAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorNativeAction.Request::new; + } + + @Override + protected UpdateConnectorNativeAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorNativeAction.Request(connectorId, randomBoolean()); + } + + @Override + protected UpdateConnectorNativeAction.Request mutateInstance(UpdateConnectorNativeAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorNativeAction.Request doParseInstance(XContentParser parser) 
throws IOException { + return UpdateConnectorNativeAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorNativeAction.Request mutateInstanceForVersion( + UpdateConnectorNativeAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorServiceTypeActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorServiceTypeActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..a30e0a6b8d493 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorServiceTypeActionRequestBWCSerializingTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorServiceTypeActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorServiceTypeAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorServiceTypeAction.Request::new; + } + + @Override + protected UpdateConnectorServiceTypeAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorServiceTypeAction.Request(connectorId, randomAlphaOfLengthBetween(3, 10)); + } + + @Override + protected UpdateConnectorServiceTypeAction.Request mutateInstance(UpdateConnectorServiceTypeAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorServiceTypeAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorServiceTypeAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorServiceTypeAction.Request mutateInstanceForVersion( + UpdateConnectorServiceTypeAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java new file mode 100644 index 0000000000000..b93c83c6494f3 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; +import org.junit.Before; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class ConnectorSecretsIndexServiceTests extends ESSingleNodeTestCase { + + private static final int TIMEOUT_SECONDS = 10; + + private ConnectorSecretsIndexService connectorSecretsIndexService; + + @Before + public void setup() throws Exception { + this.connectorSecretsIndexService = new ConnectorSecretsIndexService(client()); + } + + public void testCreateAndGetConnectorSecret() throws Exception { + PostConnectorSecretRequest createSecretRequest = ConnectorSecretsTestUtils.getRandomPostConnectorSecretRequest(); + PostConnectorSecretResponse createdSecret = awaitPostConnectorSecret(createSecretRequest); + + GetConnectorSecretResponse gotSecret = awaitGetConnectorSecret(createdSecret.id()); + 
+ assertThat(gotSecret.id(), equalTo(createdSecret.id())); + assertThat(gotSecret.value(), notNullValue()); + } + + public void testDeleteConnectorSecret() throws Exception { + PostConnectorSecretRequest createSecretRequest = ConnectorSecretsTestUtils.getRandomPostConnectorSecretRequest(); + PostConnectorSecretResponse createdSecret = awaitPostConnectorSecret(createSecretRequest); + + String secretIdToDelete = createdSecret.id(); + DeleteConnectorSecretResponse resp = awaitDeleteConnectorSecret(secretIdToDelete); + assertThat(resp.isDeleted(), equalTo(true)); + + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnectorSecret(secretIdToDelete)); + expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnectorSecret(secretIdToDelete)); + } + + private PostConnectorSecretResponse awaitPostConnectorSecret(PostConnectorSecretRequest secretRequest) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + + final AtomicReference responseRef = new AtomicReference<>(null); + final AtomicReference exception = new AtomicReference<>(null); + + connectorSecretsIndexService.createSecret(secretRequest, new ActionListener<>() { + @Override + public void onResponse(PostConnectorSecretResponse postConnectorSecretResponse) { + responseRef.set(postConnectorSecretResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exception.set(e); + latch.countDown(); + } + }); + + if (exception.get() != null) { + throw exception.get(); + } + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + PostConnectorSecretResponse response = responseRef.get(); + + assertTrue("Timeout waiting for post request", requestTimedOut); + assertNotNull("Received null response from post request", response); + + return response; + } + + private GetConnectorSecretResponse awaitGetConnectorSecret(String connectorSecretId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new 
AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + + connectorSecretsIndexService.getSecret(connectorSecretId, new ActionListener() { + @Override + public void onResponse(GetConnectorSecretResponse response) { + resp.set(response); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for get request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from get request", resp.get()); + return resp.get(); + } + + private DeleteConnectorSecretResponse awaitDeleteConnectorSecret(String connectorSecretId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + + connectorSecretsIndexService.deleteSecret(connectorSecretId, new ActionListener() { + @Override + public void onResponse(DeleteConnectorSecretResponse response) { + resp.set(response); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for delete request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from delete request", resp.get()); + return resp.get(); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java new file mode 100644 index 0000000000000..13051505f9c4d --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch 
B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets; + +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; + +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomBoolean; + +public class ConnectorSecretsTestUtils { + + public static GetConnectorSecretRequest getRandomGetConnectorSecretRequest() { + return new GetConnectorSecretRequest(randomAlphaOfLength(10)); + } + + public static GetConnectorSecretResponse getRandomGetConnectorSecretResponse() { + final String id = randomAlphaOfLength(10); + final String value = randomAlphaOfLength(10); + return new GetConnectorSecretResponse(id, value); + } + + public static PostConnectorSecretRequest getRandomPostConnectorSecretRequest() { + return new PostConnectorSecretRequest(randomAlphaOfLengthBetween(1, 20)); + } + + public static PostConnectorSecretResponse getRandomPostConnectorSecretResponse() { + return new PostConnectorSecretResponse(randomAlphaOfLength(10)); + } + + public static DeleteConnectorSecretRequest getRandomDeleteConnectorSecretRequest() { + return new 
DeleteConnectorSecretRequest(randomAlphaOfLengthBetween(1, 20)); + } + + public static DeleteConnectorSecretResponse getRandomDeleteConnectorSecretResponse() { + return new DeleteConnectorSecretResponse(randomBoolean()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretActionTests.java new file mode 100644 index 0000000000000..5d9127527fc3a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretActionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class DeleteConnectorSecretActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSecretIdIsPresent_ExpectNoValidationError() { + DeleteConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomDeleteConnectorSecretRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSecretIdIsEmpty_ExpectValidationError() { + DeleteConnectorSecretRequest requestWithMissingConnectorId = new DeleteConnectorSecretRequest(""); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("id missing")); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..bdbdb1982173e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequestBWCSerializingTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class DeleteConnectorSecretRequestBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return DeleteConnectorSecretRequest::new; + } + + @Override + protected DeleteConnectorSecretRequest createTestInstance() { + return new DeleteConnectorSecretRequest(randomAlphaOfLengthBetween(1, 10)); + } + + @Override + protected DeleteConnectorSecretRequest mutateInstance(DeleteConnectorSecretRequest instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected DeleteConnectorSecretRequest mutateInstanceForVersion(DeleteConnectorSecretRequest instance, TransportVersion version) { + return new DeleteConnectorSecretRequest(instance.id()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..964c5e15d845d --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponseBWCSerializingTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.List; + +public class DeleteConnectorSecretResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + public NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new))); + } + + @Override + protected Writeable.Reader instanceReader() { + return DeleteConnectorSecretResponse::new; + } + + @Override + protected DeleteConnectorSecretResponse createTestInstance() { + return ConnectorSecretsTestUtils.getRandomDeleteConnectorSecretResponse(); + } + + @Override + protected DeleteConnectorSecretResponse mutateInstance(DeleteConnectorSecretResponse instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected DeleteConnectorSecretResponse mutateInstanceForVersion(DeleteConnectorSecretResponse instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretActionTests.java new file mode 100644 index 0000000000000..9fc01e56ee5a0 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretActionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class GetConnectorSecretActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSecretIdIsPresent_ExpectNoValidationError() { + GetConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomGetConnectorSecretRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSecretIdIsEmpty_ExpectValidationError() { + GetConnectorSecretRequest requestWithMissingConnectorId = new GetConnectorSecretRequest(""); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("id missing")); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..abac910aa1dac --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretRequestBWCSerializingTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class GetConnectorSecretRequestBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorSecretRequest::new; + } + + @Override + protected GetConnectorSecretRequest createTestInstance() { + return new GetConnectorSecretRequest(randomAlphaOfLengthBetween(1, 10)); + } + + @Override + protected GetConnectorSecretRequest mutateInstance(GetConnectorSecretRequest instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorSecretRequest mutateInstanceForVersion(GetConnectorSecretRequest instance, TransportVersion version) { + return new GetConnectorSecretRequest(instance.id()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..4448024814df3 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/GetConnectorSecretResponseBWCSerializingTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.List; + +public class GetConnectorSecretResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + public NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new))); + } + + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorSecretResponse::new; + } + + @Override + protected GetConnectorSecretResponse createTestInstance() { + return ConnectorSecretsTestUtils.getRandomGetConnectorSecretResponse(); + } + + @Override + protected GetConnectorSecretResponse mutateInstance(GetConnectorSecretResponse instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorSecretResponse mutateInstanceForVersion(GetConnectorSecretResponse instance, TransportVersion version) { + return instance; + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretActionTests.java new file mode 100644 index 0000000000000..f1e1a670b2748 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretActionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PostConnectorSecretActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSecretIdIsPresent_ExpectNoValidationError() { + PostConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomPostConnectorSecretRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSecretIdIsEmpty_ExpectValidationError() { + PostConnectorSecretRequest requestWithMissingValue = new PostConnectorSecretRequest(""); + ActionRequestValidationException exception = requestWithMissingValue.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("value is missing")); + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..b7f8c501a91e8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequestBWCSerializingTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorSecretRequestBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorSecretRequest::new; + } + + @Override + protected PostConnectorSecretRequest createTestInstance() { + return ConnectorSecretsTestUtils.getRandomPostConnectorSecretRequest(); + } + + @Override + protected PostConnectorSecretRequest mutateInstance(PostConnectorSecretRequest instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorSecretRequest mutateInstanceForVersion(PostConnectorSecretRequest instance, TransportVersion version) { + return instance; + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..e114181270e95 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretResponseBWCSerializingTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorSecretResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorSecretResponse::new; + } + + @Override + protected PostConnectorSecretResponse createTestInstance() { + return ConnectorSecretsTestUtils.getRandomPostConnectorSecretResponse(); + } + + @Override + protected PostConnectorSecretResponse mutateInstance(PostConnectorSecretResponse instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorSecretResponse mutateInstanceForVersion(PostConnectorSecretResponse instance, TransportVersion version) { + return instance; + } + +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretActionTests.java new file mode 100644 index 0000000000000..165cc560ada1a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretActionTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportDeleteConnectorSecretActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportDeleteConnectorSecretAction action; + + @Before + public void setup() { + TransportService transportService = new TransportService( + Settings.EMPTY, + 
mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportDeleteConnectorSecretAction(transportService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testDeleteConnectorSecret_ExpectNoWarnings() throws InterruptedException { + DeleteConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomDeleteConnectorSecretRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(DeleteConnectorSecretRequest request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for delete request", requestTimedOut); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportGetConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportGetConnectorSecretActionTests.java new file mode 100644 index 0000000000000..6b046c7e44506 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportGetConnectorSecretActionTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportGetConnectorSecretActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportGetConnectorSecretAction action; + + @Before + public void setup() { + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportGetConnectorSecretAction(transportService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testGetConnectorSecret_ExpectNoWarnings() throws InterruptedException { + GetConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomGetConnectorSecretRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(GetConnectorSecretRequest request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + 
action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for get request", requestTimedOut); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPostConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPostConnectorSecretActionTests.java new file mode 100644 index 0000000000000..056d2786de1d7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPostConnectorSecretActionTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportPostConnectorSecretActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportPostConnectorSecretAction action; + + @Before + public void setup() { + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportPostConnectorSecretAction(transportService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testPostConnectorSecret_ExpectNoWarnings() throws InterruptedException { + PostConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomPostConnectorSecretRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(PostConnectorSecretRequest request) throws InterruptedException { + final CountDownLatch latch = new 
CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for post request", requestTimedOut); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 85d8826b98683..170ed25c0b302 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; @@ -35,6 +36,7 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -62,34 +64,35 @@ public class ConnectorSyncJobIndexServiceTests extends ESSingleNodeTestCase { private static final int ONE_SECOND_IN_MILLIS = 1000; private ConnectorSyncJobIndexService connectorSyncJobIndexService; - private Connector connectorOne; - private Connector connectorTwo; + + private String connectorOneId; + private String connectorTwoId; @Before public void setup() throws Exception { - connectorOne = 
ConnectorTestUtils.getRandomSyncJobConnectorInfo(); - connectorTwo = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); - createConnector(connectorOne); - createConnector(connectorTwo); + connectorOneId = createConnector(); + connectorTwoId = createConnector(); this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client()); } - private void createConnector(Connector connector) throws IOException, InterruptedException, ExecutionException, TimeoutException { + private String createConnector() throws IOException, InterruptedException, ExecutionException, TimeoutException { + + Connector connector = ConnectorTestUtils.getRandomConnector(); + final IndexRequest indexRequest = new IndexRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(connector.getConnectorId()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); ActionFuture index = client().index(indexRequest); // wait 10 seconds for connector creation - index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS); + return index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getId(); } public void testCreateConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); ConnectorSyncJobType requestJobType = syncJobRequest.getJobType(); ConnectorSyncJobTriggerMethod requestTriggerMethod = syncJobRequest.getTriggerMethod(); @@ -110,7 +113,7 @@ public void testCreateConnectorSyncJob() throws Exception { public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeToBeSet() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( - connectorOne.getConnectorId(), + connectorOneId, null, ConnectorSyncJobTriggerMethod.ON_DEMAND ); @@ -123,7 +126,7 @@ public void 
testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeTo public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTriggerMethodToBeSet() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( - connectorOne.getConnectorId(), + connectorOneId, ConnectorSyncJobType.FULL, null ); @@ -148,7 +151,7 @@ public void testCreateConnectorSyncJob_WithMissingConnectorId_ExpectException() public void testDeleteConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -166,7 +169,7 @@ public void testDeleteConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testGetConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); ConnectorSyncJobType jobType = syncJobRequest.getJobType(); ConnectorSyncJobTriggerMethod triggerMethod = syncJobRequest.getTriggerMethod(); @@ -179,7 +182,7 @@ public void testGetConnectorSyncJob() throws Exception { assertThat(syncJob.getId(), equalTo(syncJobId)); assertThat(syncJob.getJobType(), equalTo(jobType)); assertThat(syncJob.getTriggerMethod(), equalTo(triggerMethod)); - assertThat(syncJob.getConnector().getConnectorId(), equalTo(connectorOne.getConnectorId())); + assertThat(syncJob.getConnector().getConnectorId(), equalTo(connectorOneId)); } public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { @@ -188,7 +191,7 @@ public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testCheckInConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request 
syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -227,7 +230,7 @@ public void testCheckInConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testCancelConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -269,7 +272,7 @@ public void testListConnectorSyncJobs() throws Exception { for (int i = 0; i < numberOfSyncJobs; i++) { PostConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(request); ConnectorSyncJob syncJob = awaitGetConnectorSyncJob(response.getId()); @@ -309,7 +312,7 @@ public void testListConnectorSyncJobs() throws Exception { } public void testListConnectorSyncJobs_WithStatusPending_GivenOnePendingTwoCancelled_ExpectOnePending() throws Exception { - String connectorId = connectorOne.getConnectorId(); + String connectorId = connectorOneId; PostConnectorSyncJobAction.Request requestOne = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); PostConnectorSyncJobAction.Request requestTwo = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); @@ -342,9 +345,6 @@ public void testListConnectorSyncJobs_WithStatusPending_GivenOnePendingTwoCancel @AwaitsFix(bugUrl = "https://github.com/elastic/enterprise-search-team/issues/6351") public void 
testListConnectorSyncJobs_WithConnectorOneId_GivenTwoOverallOneFromConnectorOne_ExpectOne() throws Exception { - String connectorOneId = connectorOne.getConnectorId(); - String connectorTwoId = connectorTwo.getConnectorId(); - PostConnectorSyncJobAction.Request requestOne = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connectorOneId ); @@ -378,7 +378,7 @@ public void testListConnectorSyncJobs_WithNoSyncJobs_ReturnEmptyResult() throws public void testUpdateConnectorSyncJobError() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -407,7 +407,7 @@ public void testUpdateConnectorSyncJobError_WithMissingSyncJobId_ExceptException public void testUpdateConnectorSyncJobIngestionStats() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -451,7 +451,7 @@ public void testUpdateConnectorSyncJobIngestionStats() throws Exception { public void testUpdateConnectorSyncJobIngestionStats_WithoutLastSeen_ExpectUpdateOfLastSeen() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connectorOne.getConnectorId() + connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -492,6 +492,23 @@ public void testUpdateConnectorSyncJobIngestionStats_WithMissingSyncJobId_Expect ); } + public void 
testTransformConnectorFilteringToSyncJobRepresentation_WithFilteringEqualNull() { + List filtering = null; + assertNull(connectorSyncJobIndexService.transformConnectorFilteringToSyncJobRepresentation(filtering)); + } + + public void testTransformConnectorFilteringToSyncJobRepresentation_WithFilteringEmpty() { + List filtering = Collections.emptyList(); + assertNull(connectorSyncJobIndexService.transformConnectorFilteringToSyncJobRepresentation(filtering)); + } + + public void testTransformConnectorFilteringToSyncJobRepresentation_WithFilteringRules() { + ConnectorFiltering filtering1 = ConnectorTestUtils.getRandomConnectorFiltering(); + + List filtering = List.of(filtering1, ConnectorTestUtils.getRandomConnectorFiltering()); + assertEquals(connectorSyncJobIndexService.transformConnectorFilteringToSyncJobRepresentation(filtering), filtering1.getActive()); + } + private UpdateResponse awaitUpdateConnectorSyncJobIngestionStats(UpdateConnectorSyncJobIngestionStatsAction.Request request) throws Exception { CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachineTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachineTests.java new file mode 100644 index 0000000000000..b702a5ffa7eef --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachineTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; + +public class ConnectorSyncJobStateMachineTests extends ESTestCase { + + public void testValidTransitionFromPending() { + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.PENDING, ConnectorSyncStatus.IN_PROGRESS)); + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.PENDING, ConnectorSyncStatus.CANCELED)); + } + + public void testInvalidTransitionFromPending() { + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.PENDING, ConnectorSyncStatus.COMPLETED)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.PENDING, ConnectorSyncStatus.SUSPENDED)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.PENDING, ConnectorSyncStatus.CANCELING)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.PENDING, ConnectorSyncStatus.ERROR)); + } + + public void testValidTransitionFromInProgress() { + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.CANCELING)); + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.COMPLETED)); + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.SUSPENDED)); + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.ERROR)); + } + + public void testInvalidTransitionFromInProgress() { + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.PENDING)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.IN_PROGRESS, ConnectorSyncStatus.CANCELED)); + } + + public void 
testNoValidTransitionsFromCompleted() { + for (ConnectorSyncStatus state : ConnectorSyncStatus.values()) { + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.COMPLETED, state)); + } + } + + public void testValidTransitionFromSuspended() { + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.SUSPENDED, ConnectorSyncStatus.IN_PROGRESS)); + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.SUSPENDED, ConnectorSyncStatus.CANCELED)); + } + + public void testInvalidTransitionFromSuspended() { + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.SUSPENDED, ConnectorSyncStatus.PENDING)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.SUSPENDED, ConnectorSyncStatus.COMPLETED)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.SUSPENDED, ConnectorSyncStatus.CANCELING)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.SUSPENDED, ConnectorSyncStatus.ERROR)); + } + + public void testValidTransitionFromCanceling() { + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.CANCELING, ConnectorSyncStatus.CANCELED)); + assertTrue(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.CANCELING, ConnectorSyncStatus.ERROR)); + } + + public void testInvalidTransitionFromCanceling() { + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.CANCELING, ConnectorSyncStatus.PENDING)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.CANCELING, ConnectorSyncStatus.IN_PROGRESS)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.CANCELING, ConnectorSyncStatus.COMPLETED)); + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.CANCELING, ConnectorSyncStatus.SUSPENDED)); + } + + public void testNoValidTransitionsFromCanceled() { + for 
(ConnectorSyncStatus state : ConnectorSyncStatus.values()) { + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.CANCELED, state)); + } + } + + public void testNoValidTransitionsFromError() { + for (ConnectorSyncStatus state : ConnectorSyncStatus.values()) { + assertFalse(ConnectorSyncJobStateMachine.isValidTransition(ConnectorSyncStatus.ERROR, state)); + } + } + + public void testTransitionToSameState() { + for (ConnectorSyncStatus state : ConnectorSyncStatus.values()) { + assertFalse( + "Transition from " + state + " to itself should be invalid", + ConnectorSyncJobStateMachine.isValidTransition(state, state) + ); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index b82db8d04d3a9..64f11923ce164 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -21,7 +21,6 @@ import java.util.List; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -46,88 +45,60 @@ public final void testRandomSerialization() throws IOException { public void testFromXContent_WithAllFields_AllSet() throws IOException { String content = XContentHelper.stripWhitespace(""" { - "cancelation_requested_at": "2023-12-01T14:19:39.394194Z", - "canceled_at": "2023-12-01T14:19:39.394194Z", - "completed_at": "2023-12-01T14:19:39.394194Z", - "connector": { - "id": "connector-id", - "filtering": [ - { - "active": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": 
"2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } - }, - "domain": "DEFAULT", - "draft": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } - } - } - ], - "index_name": "search-connector", - "language": "english", - "pipeline": { - "extract_binary_content": true, - "name": "ent-search-generic-ingestion", - "reduce_whitespace": true, - "run_ml_inference": false - }, - "service_type": "service type", - "configuration": {} - }, - "created_at": "2023-12-01T14:18:43.07693Z", - "deleted_document_count": 10, - "error": "some-error", - "id": "HIC-JYwB9RqKhB7x_hIE", - "indexed_document_count": 10, - "indexed_document_volume": 10, - "job_type": "full", - "last_seen": "2023-12-01T14:18:43.07693Z", - "metadata": {}, - "started_at": "2023-12-01T14:18:43.07693Z", - "status": "canceling", - "total_document_count": 0, - "trigger_method": "scheduled", - "worker_hostname": "worker-hostname" - } + "cancelation_requested_at": "2023-12-01T14:19:39.394194Z", + "canceled_at": "2023-12-01T14:19:39.394194Z", + "completed_at": "2023-12-01T14:19:39.394194Z", + "connector": { + "id": "connector-id", + "filtering": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + 
"order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + }, + "created_at": "2023-12-01T14:18:43.07693Z", + "deleted_document_count": 10, + "error": "some-error", + "id": "HIC-JYwB9RqKhB7x_hIE", + "indexed_document_count": 10, + "indexed_document_volume": 10, + "job_type": "full", + "last_seen": "2023-12-01T14:18:43.07693Z", + "metadata": {}, + "started_at": "2023-12-01T14:18:43.07693Z", + "status": "canceling", + "total_document_count": 0, + "trigger_method": "scheduled", + "worker_hostname": "worker-hostname" + } """); ConnectorSyncJob syncJob = ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); @@ -137,7 +108,7 @@ public void testFromXContent_WithAllFields_AllSet() throws IOException { assertThat(syncJob.getCompletedAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); assertThat(syncJob.getConnector().getConnectorId(), equalTo("connector-id")); - assertThat(syncJob.getConnector().getFiltering(), hasSize(greaterThan(0))); + assertThat(syncJob.getConnector().getSyncJobFiltering().getRules(), hasSize(1)); assertThat(syncJob.getConnector().getIndexName(), equalTo("search-connector")); assertThat(syncJob.getConnector().getLanguage(), equalTo("english")); assertThat(syncJob.getConnector().getPipeline(), notNullValue()); @@ -161,82 +132,54 @@ public void testFromXContent_WithAllFields_AllSet() throws IOException { public void testFromXContent_WithOnlyNonNullableFieldsSet_DoesNotThrow() throws IOException { String content = XContentHelper.stripWhitespace(""" { - "connector": { - "id": "connector-id", - "filtering": [ - { - "active": { - 
"advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } - }, - "domain": "DEFAULT", - "draft": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } - } - } - ], - "index_name": "search-connector", - "language": "english", - "pipeline": { - "extract_binary_content": true, - "name": "ent-search-generic-ingestion", - "reduce_whitespace": true, - "run_ml_inference": false - }, - "service_type": "service type", - "configuration": {} - }, - "created_at": "2023-12-01T14:18:43.07693Z", - "deleted_document_count": 10, - "id": "HIC-JYwB9RqKhB7x_hIE", - "indexed_document_count": 10, - "indexed_document_volume": 10, - "job_type": "full", - "last_seen": "2023-12-01T14:18:43.07693Z", - "metadata": {}, - "status": "canceling", - "total_document_count": 0, - "trigger_method": "scheduled" - } + "connector": { + "id": "connector-id", + "filtering": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } 
+ }, + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + }, + "created_at": "2023-12-01T14:18:43.07693Z", + "deleted_document_count": 10, + "id": "HIC-JYwB9RqKhB7x_hIE", + "indexed_document_count": 10, + "indexed_document_volume": 10, + "job_type": "full", + "last_seen": "2023-12-01T14:18:43.07693Z", + "metadata": {}, + "status": "canceling", + "total_document_count": 0, + "trigger_method": "scheduled" + } """); ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); @@ -245,88 +188,60 @@ public void testFromXContent_WithOnlyNonNullableFieldsSet_DoesNotThrow() throws public void testFromXContent_WithAllNullableFieldsSetToNull_DoesNotThrow() throws IOException { String content = XContentHelper.stripWhitespace(""" { - "cancelation_requested_at": null, - "canceled_at": null, - "completed_at": null, - "connector": { - "id": "connector-id", - "filtering": [ - { - "active": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } - }, - "domain": "DEFAULT", - "draft": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": 
"valid" - } - } - } - ], - "index_name": "search-connector", - "language": "english", - "pipeline": { - "extract_binary_content": true, - "name": "ent-search-generic-ingestion", - "reduce_whitespace": true, - "run_ml_inference": false - }, - "service_type": "service type", - "configuration": {} - }, - "created_at": "2023-12-01T14:18:43.07693Z", - "deleted_document_count": 10, - "error": null, - "id": "HIC-JYwB9RqKhB7x_hIE", - "indexed_document_count": 10, - "indexed_document_volume": 10, - "job_type": "full", - "last_seen": null, - "metadata": {}, - "started_at": null, - "status": "canceling", - "total_document_count": 0, - "trigger_method": "scheduled", - "worker_hostname": null - } + "cancelation_requested_at": null, + "canceled_at": null, + "completed_at": null, + "connector": { + "id": "connector-id", + "filtering": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + }, + "created_at": "2023-12-01T14:18:43.07693Z", + "deleted_document_count": 10, + "error": null, + "id": "HIC-JYwB9RqKhB7x_hIE", + "indexed_document_count": 10, + "indexed_document_volume": 10, + "job_type": "full", + "last_seen": null, + "metadata": {}, + "started_at": null, + "status": "canceling", + "total_document_count": 0, + "trigger_method": "scheduled", + "worker_hostname": null + } """); ConnectorSyncJob.fromXContentBytes(new BytesArray(content), 
XContentType.JSON); @@ -336,57 +251,29 @@ public void testSyncJobConnectorFromXContent_WithAllFieldsSet() throws IOExcepti String content = XContentHelper.stripWhitespace(""" { "id": "connector-id", - "filtering": [ - { - "active": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } - }, - "domain": "DEFAULT", - "draft": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } + "filtering": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" } + ], + "validation": { + "errors": [], + "state": "valid" } - ], + }, "index_name": "search-connector", "language": "english", "pipeline": { @@ -403,7 +290,7 @@ public void testSyncJobConnectorFromXContent_WithAllFieldsSet() throws IOExcepti Connector connector = ConnectorSyncJob.syncJobConnectorFromXContentBytes(new BytesArray(content), null, XContentType.JSON); assertThat(connector.getConnectorId(), equalTo("connector-id")); - assertThat(connector.getFiltering().size(), equalTo(1)); + 
assertThat(connector.getSyncJobFiltering().getRules(), hasSize(1)); assertThat(connector.getIndexName(), equalTo("search-connector")); assertThat(connector.getLanguage(), equalTo("english")); assertThat(connector.getPipeline(), notNullValue()); @@ -415,57 +302,29 @@ public void testSyncJobConnectorFromXContent_WithAllNonOptionalFieldsSet_DoesNot String content = XContentHelper.stripWhitespace(""" { "id": "connector-id", - "filtering": [ - { - "active": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } - }, - "domain": "DEFAULT", - "draft": { - "advanced_snippet": { - "created_at": "2023-12-01T14:18:37.397819Z", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-12-01T14:18:37.397819Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-12-01T14:18:37.397819Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } + "filtering": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" } + ], + "validation": { + "errors": [], + "state": "valid" } - ], + }, "index_name": null, "language": null, "pipeline": null, diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle index fc11e04c4ede2..cda236c3d02ae 100644 --- a/x-pack/plugin/eql/build.gradle 
+++ b/x-pack/plugin/eql/build.gradle @@ -25,8 +25,6 @@ dependencies { testImplementation project(path: ':modules:analysis-common') testImplementation 'io.ous:jtoml:2.0.0' - - internalClusterTestImplementation project(":client:rest-high-level") } diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle index ea428790f2b05..8b3f0d11361f3 100644 --- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle +++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle @@ -16,10 +16,6 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.bwc-test' apply plugin: 'elasticsearch.rest-resources' -dependencies { - testImplementation project(':client:rest-high-level') -} - BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> /** diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java index 6efe56fbf2222..a68c0d3ae9d96 100644 --- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java +++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java @@ -37,7 +37,6 @@ * This test ensures that EQL can process CCS requests correctly when the local and remote clusters * have different but compatible versions. 
*/ -@SuppressWarnings("removal") public class EqlCcsRollingUpgradeIT extends ESRestTestCase { private static final Logger LOGGER = LogManager.getLogger(EqlCcsRollingUpgradeIT.class); diff --git a/x-pack/plugin/eql/qa/common/build.gradle b/x-pack/plugin/eql/qa/common/build.gradle index 67fd415fa57d9..5fe6e54a440a8 100644 --- a/x-pack/plugin/eql/qa/common/build.gradle +++ b/x-pack/plugin/eql/qa/common/build.gradle @@ -5,7 +5,6 @@ dependencies { api project(xpackModule('core')) api testArtifact(project(xpackModule('core'))) api project(xpackModule('ql:test-fixtures')) - implementation project(":client:rest-high-level") // TOML parser for EqlActionIT tests api 'io.ous:jtoml:2.0.0' } diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle index 0008c30f260d6..d245dc444f0b8 100644 --- a/x-pack/plugin/eql/qa/correctness/build.gradle +++ b/x-pack/plugin/eql/qa/correctness/build.gradle @@ -9,7 +9,6 @@ dependencies { javaRestTestImplementation project(':test:framework') javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(xpackModule('ql:test-fixtures')) - javaRestTestImplementation project(":client:rest-high-level") javaRestTestImplementation 'io.ous:jtoml:2.0.0' } diff --git a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDataLoader.java b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDataLoader.java index 0c2ce20facebb..9fa0320527f93 100644 --- a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDataLoader.java +++ b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDataLoader.java @@ -30,7 +30,6 @@ import static org.elasticsearch.test.ESTestCase.assertEquals; -@SuppressWarnings("removal") public class EqlDataLoader { private static final String PROPERTIES_FILENAME = "config.properties"; diff --git 
a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java index c0a286cc5c464..414705aff0b79 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.support.ActionFilter; @@ -258,7 +257,7 @@ protected TaskId cancelTaskWithXOpaqueId(String id, String action) { TaskId taskId = findTaskWithXOpaqueId(id, action); assertNotNull(taskId); logger.trace("Cancelling task " + taskId); - CancelTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); + ListTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); assertThat(response.getTasks(), hasSize(1)); assertThat(response.getTasks().get(0).action(), equalTo(action)); logger.trace("Task is cancelled " + taskId); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchAction.java index 408bc445d86d8..b3cdd46aca93e 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchAction.java @@ -13,6 +13,6 @@ 
public class EqlSearchAction extends ActionType { public static final String NAME = "indices:data/read/eql"; private EqlSearchAction() { - super(NAME, EqlSearchResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index 2d7a330560fcc..f9f9238b6c4ab 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -281,7 +281,7 @@ private Event(StreamInput in) throws IOException { } else { fetchFields = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { missing = in.readBoolean(); } else { missing = index.isEmpty(); @@ -304,7 +304,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(fetchFields, StreamOutput::writeWriteable); } } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { // for BWC, 8.9.1+ does not have "missing" attribute, but it considers events with an empty index "" as missing events // see https://github.com/elastic/elasticsearch/pull/98130 out.writeBoolean(missing); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java index ecb8ce633d985..a2309c48578a3 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java @@ -18,7 +18,7 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; @@ -95,7 +95,7 @@ public static ActionListener multiSearchLogListener(ActionL } private static void logSearchResponse(SearchResponse response, Logger logger) { - List aggs = Collections.emptyList(); + List aggs = Collections.emptyList(); if (response.getAggregations() != null) { aggs = response.getAggregations().asList(); } @@ -182,7 +182,8 @@ public static SearchRequest prepareRequest(SearchSourceBuilder source, boolean i } public static List searchHits(SearchResponse response) { - return Arrays.asList(response.getHits().getHits()); + // TODO remove unpooled usage + return Arrays.asList(response.getHits().asUnpooled().getHits()); } /** diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetResultAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetResultAction.java index 4b7edcd24b34e..2bd7032f49b4a 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetResultAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetResultAction.java @@ -14,6 +14,6 @@ public class EqlAsyncGetResultAction extends ActionType { public static final EqlAsyncGetResultAction INSTANCE = new EqlAsyncGetResultAction(); private EqlAsyncGetResultAction() { - super(EqlAsyncActionNames.EQL_ASYNC_GET_RESULT_ACTION_NAME, EqlSearchResponse::new); + super(EqlAsyncActionNames.EQL_ASYNC_GET_RESULT_ACTION_NAME); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetStatusAction.java 
b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetStatusAction.java index 24d6fd6fee68a..46e94a669fa59 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetStatusAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlAsyncGetStatusAction.java @@ -14,6 +14,6 @@ public class EqlAsyncGetStatusAction extends ActionType { public static final String NAME = "cluster:monitor/eql/async/status"; private EqlAsyncGetStatusAction() { - super(NAME, QlStatusResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java index 881cb083a48f2..fe21051b4063e 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -101,6 +102,7 @@ public List> getSettings() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlStatsAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlStatsAction.java index 4c68c7ef155d1..29c9eb6237fd8 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlStatsAction.java +++ 
b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlStatsAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.eql.plugin; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public class EqlStatsAction extends ActionType { @@ -16,6 +15,6 @@ public class EqlStatsAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/eql/stats/dist"; private EqlStatsAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java index f573ea805b0b9..7c8204d1ecf39 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java @@ -210,7 +210,7 @@ public static void operation( r -> listener.onResponse(qualifyHits(r, clusterAlias)), e -> listener.onFailure(qualifyException(e, remoteIndices, clusterAlias)) ), - EqlSearchAction.INSTANCE.getResponseReader(), + EqlSearchResponse::new, TransportResponseHandler.TRANSPORT_WORKER ) ); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java index edbeb3d0a0d8c..255e94d6bda34 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java @@ -289,7 +289,7 @@ private List mutateEvents(List original, TransportVersion version) e.id(), e.source(), version.onOrAfter(TransportVersions.V_7_13_0) ? e.fetchFields() : null, - version.onOrAfter(TransportVersions.V_8_500_061) ? 
e.missing() : e.index().isEmpty() + version.onOrAfter(TransportVersions.V_8_10_X) ? e.missing() : e.index().isEmpty() ) ); } @@ -299,10 +299,10 @@ private List mutateEvents(List original, TransportVersion version) public void testEmptyIndexAsMissingEvent() throws IOException { Event event = new Event("", "", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), null); BytesStreamOutput out = new BytesStreamOutput(); - out.setTransportVersion(TransportVersions.V_8_500_020);// 8.9.1 + out.setTransportVersion(TransportVersions.V_8_9_X);// 8.9.1 event.writeTo(out); ByteArrayStreamInput in = new ByteArrayStreamInput(out.bytes().array()); - in.setTransportVersion(TransportVersions.V_8_500_020); + in.setTransportVersion(TransportVersions.V_8_9_X); Event event2 = Event.readFrom(in); assertTrue(event2.missing()); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java index f391e9bdae84b..7bb6a228f6e48 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java @@ -74,14 +74,14 @@ public void query(QueryRequest r, ActionListener l) { } long sortValue = implicitTiebreakerValues.get(ordinal); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); searchHit.sortValues( new SearchSortValues( new Long[] { (long) ordinal, sortValue }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW } ) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); + SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); 
ActionListener.respondAndRelease( l, new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY) @@ -94,7 +94,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java index eb417570cb4a7..a8ed842e94c44 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java @@ -188,7 +188,7 @@ static class EventsAsHits { Map documentFields = new HashMap<>(); documentFields.put(KEY_FIELD_NAME, new DocumentField(KEY_FIELD_NAME, Collections.singletonList(value.v1()))); // save the timestamp both as docId (int) and as id (string) - SearchHit searchHit = new SearchHit(entry.getKey(), entry.getKey().toString()); + SearchHit searchHit = SearchHit.unpooled(entry.getKey(), entry.getKey().toString()); searchHit.addDocumentFields(documentFields, Map.of()); hits.add(searchHit); } @@ -215,7 +215,7 @@ public void query(QueryRequest r, ActionListener l) { Map> evs = ordinal != Integer.MAX_VALUE ? 
events.get(ordinal) : emptyMap(); EventsAsHits eah = new EventsAsHits(evs); - SearchHits searchHits = new SearchHits( + SearchHits searchHits = SearchHits.unpooled( eah.hits.toArray(SearchHits.EMPTY), new TotalHits(eah.hits.size(), Relation.EQUAL_TO), 0.0f @@ -232,7 +232,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java index 9141555fcd613..afb9b590914dd 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java @@ -29,7 +29,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; @@ -171,8 +172,8 @@ public void fetchHits(Iterable> refs, ActionListener searchHits = new ArrayList<>(); - searchHits.add(new SearchHit(1, String.valueOf(1))); - searchHits.add(new SearchHit(2, String.valueOf(2))); + searchHits.add(SearchHit.unpooled(1, String.valueOf(1))); + searchHits.add(SearchHit.unpooled(2, String.valueOf(2))); return new Sample(new SequenceKey(randomAlphaOfLength(10)), searchHits); } @@ -220,11 +221,11 
@@ protected void @SuppressWarnings("unchecked") void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { - Aggregations aggs = new Aggregations(List.of(newInternalComposite())); + InternalAggregations aggs = InternalAggregations.from(List.of(newInternalComposite())); ActionListener.respondAndRelease( listener, (Response) new SearchResponse( - null, + SearchHits.EMPTY_WITH_TOTAL_HITS, aggs, null, false, diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/SampleIteratorTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/SampleIteratorTests.java index 76deb43f2e620..5019b3ce13b5c 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/SampleIteratorTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/SampleIteratorTests.java @@ -73,7 +73,7 @@ private List asSearchHitsList(Integer... docIds) { } List searchHits = new ArrayList<>(docIds.length); for (Integer docId : docIds) { - searchHits.add(new SearchHit(docId, docId.toString())); + searchHits.add(SearchHit.unpooled(docId, docId.toString())); } return searchHits; diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java index b995693458095..f62100a98b066 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java @@ -153,7 +153,7 @@ private SearchHit searchHit(Object timeValue, Object tiebreakerValue, Supplier fields = new HashMap<>(); fields.put(tsField, new DocumentField(tsField, singletonList(timeValue))); fields.put(tbField, new DocumentField(tsField, 
singletonList(tiebreakerValue))); - SearchHit searchHit = new SearchHit(randomInt(), randomAlphaOfLength(10)); + SearchHit searchHit = SearchHit.unpooled(randomInt(), randomAlphaOfLength(10)); searchHit.addDocumentFields(fields, Map.of()); searchHit.sortValues(searchSortValues.get()); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java index 9c9bbfcdc5127..0bdb88592ce0f 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java @@ -135,7 +135,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } @@ -236,12 +236,12 @@ protected void @SuppressWarnings("unchecked") void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { int ordinal = searchRequest.source().terminateAfter(); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); searchHit.sortValues( new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 0.0f); + SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 0.0f); SearchResponse response = new SearchResponse( searchHits, null, diff --git 
a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java index 7ef2b95d982fb..3097fbbc7f04a 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java @@ -107,11 +107,11 @@ static class TestQueryClient implements QueryClient { @Override public void query(QueryRequest r, ActionListener l) { int ordinal = r.searchSource().terminateAfter(); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); searchHit.sortValues( new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); + SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); ActionListener.respondAndRelease( l, new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY) @@ -124,7 +124,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } @@ -425,12 +425,12 @@ private class SuccessfulESMockClient extends ESMockClient { @Override void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { int ordinal = searchRequest.source().terminateAfter(); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); 
searchHit.sortValues( new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); + SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); SearchResponse response = new SearchResponse( searchHits, null, @@ -477,11 +477,11 @@ void handleSearchRequest(ActionListener void handleSearchRequest(ActionListener filter) { - ExecutableElement result = findMethod(declarationType, names, filter); + ExecutableElement result = findMethod(names, filter, declarationType, superClassOf(declarationType)); if (result == null) { if (names.length == 1) { throw new IllegalArgumentException(declarationType + "#" + names[0] + " is required"); @@ -58,18 +61,34 @@ static ExecutableElement findRequiredMethod(TypeElement declarationType, String[ } static ExecutableElement findMethod(TypeElement declarationType, String name) { - return findMethod(declarationType, new String[] { name }, e -> true); + return findMethod(new String[] { name }, e -> true, declarationType, superClassOf(declarationType)); } - static ExecutableElement findMethod(TypeElement declarationType, String[] names, Predicate filter) { - for (ExecutableElement e : ElementFilter.methodsIn(declarationType.getEnclosedElements())) { - if (e.getModifiers().contains(Modifier.STATIC) == false) { - continue; + private static TypeElement superClassOf(TypeElement declarationType) { + TypeMirror superclass = declarationType.getSuperclass(); + if (superclass instanceof DeclaredType declaredType) { + Element superclassElement = declaredType.asElement(); + if (superclassElement instanceof TypeElement) { + return (TypeElement) superclassElement; } - String name = e.getSimpleName().toString(); - for (String n : names) { - if (n.equals(name) && filter.test(e)) { - return e; + } + return null; + } + + 
static ExecutableElement findMethod(TypeElement declarationType, String[] names, Predicate filter) { + return findMethod(names, filter, declarationType); + } + + static ExecutableElement findMethod(String[] names, Predicate filter, TypeElement... declarationTypes) { + for (TypeElement declarationType : declarationTypes) { + for (ExecutableElement e : ElementFilter.methodsIn(declarationType.getEnclosedElements())) { + if (e.getModifiers().contains(Modifier.STATIC)) { + String name = e.getSimpleName().toString(); + for (String n : names) { + if (n.equals(name) && filter.test(e)) { + return e; + } + } } } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index a592bd65acb3a..666f1ad926eeb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -67,7 +67,6 @@ public boolean getBoolean(int valueIndex) { @Override public BooleanBlock filter(int... positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newBooleanBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java index 82a0bb364966b..a19ed24302b65 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java @@ -68,7 +68,6 @@ public boolean getBoolean(int valueIndex) { @Override public BooleanBlock filter(int... 
positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newBooleanBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 7f1a1608dac5b..69e5499eaba46 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -70,7 +70,6 @@ public BytesRef getBytesRef(int valueIndex, BytesRef dest) { @Override public BytesRefBlock filter(int... positions) { - // TODO use reference counting to share the vector final BytesRef scratch = new BytesRef(); try (var builder = blockFactory().newBytesRefBlockBuilder(positions.length)) { for (int pos : positions) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index cb5258c7ae22c..b5f5c69e0508a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -67,7 +67,6 @@ public double getDouble(int valueIndex) { @Override public DoubleBlock filter(int... 
positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newDoubleBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java index 59bbd5a941e4b..39f959edf5ee3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java @@ -68,7 +68,6 @@ public double getDouble(int valueIndex) { @Override public DoubleBlock filter(int... positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newDoubleBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 0d8262975c535..2afefbff16117 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -67,7 +67,6 @@ public int getInt(int valueIndex) { @Override public IntBlock filter(int... 
positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newIntBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java index b1a1473ff4b4a..dc60ce43c04cc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java @@ -68,7 +68,6 @@ public int getInt(int valueIndex) { @Override public IntBlock filter(int... positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newIntBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index c12033e829e6f..7491d6519fc57 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -67,7 +67,6 @@ public long getLong(int valueIndex) { @Override public LongBlock filter(int... 
positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newLongBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java index 9eb8a527a96b5..3ff9a12991d43 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java @@ -68,7 +68,6 @@ public long getLong(int valueIndex) { @Override public LongBlock filter(int... positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().newLongBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java index 40fe7ffdde661..b537b6d96fc9d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link BooleanBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. 
+ */ abstract class KeyExtractorForBoolean implements KeyExtractor { static KeyExtractorForBoolean extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, BooleanBlock block) { BooleanVector v = block.asVector(); if (v != null) { - return new KeyExtractorForBoolean.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForBoolean.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForBoolean.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBoolean.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForBoolean.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBoolean.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForBoolean.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBoolean.MaxForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForBoolean.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBoolean.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForBoolean { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForBoolean%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForBoolean { private final BooleanVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, BooleanVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, BooleanVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForBoolean { + static class MinFromAscendingBlock extends KeyExtractorForBoolean { private 
final BooleanBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForBoolean { + static class MaxFromAscendingBlock extends KeyExtractorForBoolean { private final BooleanBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForBoolean { + static class MinFromUnorderedBlock extends KeyExtractorForBoolean { private final BooleanBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -120,10 +131,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForBoolean { + static class MaxFromUnorderedBlock extends KeyExtractorForBoolean { private final BooleanBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java index 2f546a46aaeaf..bf07905019dad 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java @@ -12,20 +12,26 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link BytesRefBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorForBytesRef implements KeyExtractor { static KeyExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, BytesRefBlock block) { BytesRefVector v = block.asVector(); if (v != null) { - return new KeyExtractorForBytesRef.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForBytesRef.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForBytesRef.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBytesRef.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForBytesRef.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBytesRef.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForBytesRef.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBytesRef.MaxForUnordered(encoder, nul, nonNul, block); + ? 
new KeyExtractorForBytesRef.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBytesRef.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final TopNEncoder encoder; @@ -49,10 +55,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForBytesRef { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForBytesRef%s(%s, %s, %s)", getClass().getSimpleName(), encoder, nul, nonNul); + } + + static class FromVector extends KeyExtractorForBytesRef { private final BytesRefVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, BytesRefVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, BytesRefVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -63,10 +74,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForBytesRef { + static class MinFromAscendingBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -80,10 +91,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForBytesRef { + static class MaxFromAscendingBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -97,12 +108,12 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForBytesRef { + static class 
MinFromUnorderedBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; private final BytesRef minScratch = new BytesRef(); - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -128,12 +139,12 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForBytesRef { + static class MaxFromUnorderedBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; private final BytesRef maxScratch = new BytesRef(); - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java index 5e821b9e24db5..03477a65a3cde 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link DoubleBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. 
+ */ abstract class KeyExtractorForDouble implements KeyExtractor { static KeyExtractorForDouble extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, DoubleBlock block) { DoubleVector v = block.asVector(); if (v != null) { - return new KeyExtractorForDouble.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForDouble.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForDouble.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForDouble.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForDouble.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForDouble.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForDouble.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForDouble.MaxForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForDouble.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForDouble.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForDouble { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForDouble%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForDouble { private final DoubleVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, DoubleVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, DoubleVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForDouble { + static class MinFromAscendingBlock extends KeyExtractorForDouble { private final DoubleBlock block; 
- MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForDouble { + static class MaxFromAscendingBlock extends KeyExtractorForDouble { private final DoubleBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForDouble { + static class MinFromUnorderedBlock extends KeyExtractorForDouble { private final DoubleBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -119,10 +130,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForDouble { + static class MaxFromUnorderedBlock extends KeyExtractorForDouble { private final DoubleBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java index d4269a622f098..5f45df662efdd 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link IntBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorForInt implements KeyExtractor { static KeyExtractorForInt extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, IntBlock block) { IntVector v = block.asVector(); if (v != null) { - return new KeyExtractorForInt.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForInt.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForInt.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForInt.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForInt.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForInt.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForInt.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForInt.MaxForUnordered(encoder, nul, nonNul, block); + ? 
new KeyExtractorForInt.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForInt.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForInt { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForInt%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForInt { private final IntVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, IntVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, IntVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForInt { + static class MinFromAscendingBlock extends KeyExtractorForInt { private final IntBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForInt { + static class MaxFromAscendingBlock extends KeyExtractorForInt { private final IntBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForInt { + static class MinFromUnorderedBlock extends KeyExtractorForInt { private final IntBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte 
nonNul, IntBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -119,10 +130,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForInt { + static class MaxFromUnorderedBlock extends KeyExtractorForInt { private final IntBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java index 6a200efff529d..e61ab644ecfe1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link LongBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorForLong implements KeyExtractor { static KeyExtractorForLong extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, LongBlock block) { LongVector v = block.asVector(); if (v != null) { - return new KeyExtractorForLong.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForLong.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? 
new KeyExtractorForLong.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForLong.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForLong.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForLong.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForLong.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForLong.MaxForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForLong.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForLong.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForLong { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForLong%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForLong { private final LongVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, LongVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, LongVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForLong { + static class MinFromAscendingBlock extends KeyExtractorForLong { private final LongBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForLong { + static class MaxFromAscendingBlock extends KeyExtractorForLong { private final LongBlock block; - 
MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForLong { + static class MinFromUnorderedBlock extends KeyExtractorForLong { private final LongBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -119,10 +130,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForLong { + static class MaxFromUnorderedBlock extends KeyExtractorForLong { private final LongBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java index 184ef69f00d85..e6b8d70a63ed7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; +/** + * Builds the resulting {@link BooleanBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. 
+ */ class ResultBuilderForBoolean implements ResultBuilder { private final BooleanBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java index 4008f7fbd924b..637cddb9b3089 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; +/** + * Builds the resulting {@link BytesRefBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. + */ class ResultBuilderForBytesRef implements ResultBuilder { private final BytesRefBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java index f06a1e814ef43..e7119ee714c34 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; +/** + * Builds the resulting {@link DoubleBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. 
+ */ class ResultBuilderForDouble implements ResultBuilder { private final DoubleBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java index 848bbf9ab6a0a..ad1236975141b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; +/** + * Builds the resulting {@link IntBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. + */ class ResultBuilderForInt implements ResultBuilder { private final IntBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java index b4361ad83180a..cad392c3d525c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; +/** + * Builds the resulting {@link LongBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. 
+ */ class ResultBuilderForLong implements ResultBuilder { private final LongBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java index b13dd3ce7f2b0..535618da01727 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link BooleanBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class ValueExtractorForBoolean implements ValueExtractor { static ValueExtractorForBoolean extractorFor(TopNEncoder encoder, boolean inKey, BooleanBlock block) { BooleanVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java index 65c5da5737a59..70065fd544759 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java @@ -12,6 +12,10 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link BytesRefBlock}s. + * This class is generated. 
Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class ValueExtractorForBytesRef implements ValueExtractor { static ValueExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean inKey, BytesRefBlock block) { BytesRefVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java index d20f2bf53972a..b504196dff7e1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link DoubleBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. 
+ */ abstract class ValueExtractorForDouble implements ValueExtractor { static ValueExtractorForDouble extractorFor(TopNEncoder encoder, boolean inKey, DoubleBlock block) { DoubleVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java index d20368f874e8e..485d9f4bb8559 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link IntBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class ValueExtractorForInt implements ValueExtractor { static ValueExtractorForInt extractorFor(TopNEncoder encoder, boolean inKey, IntBlock block) { IntVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java index b7b566b3eda3d..4a244746bd0d3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link LongBlock}s. 
+ * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class ValueExtractorForLong implements ValueExtractor { static ValueExtractorForLong extractorFor(TopNEncoder encoder, boolean inKey, LongBlock block) { LongVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java new file mode 100644 index 0000000000000..eef094ce2ecfa --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidCartesianPointDocValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final CentroidPointAggregator.CentroidState state; + + private final List channels; + + public SpatialCentroidCartesianPointDocValuesAggregatorFunction(DriverContext driverContext, + List channels, CentroidPointAggregator.CentroidState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialCentroidCartesianPointDocValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialCentroidCartesianPointDocValuesAggregatorFunction(driverContext, channels, SpatialCentroidCartesianPointDocValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + 
block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + assert xVal.getPositionCount() == 1; + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + assert xDel.getPositionCount() == 1; + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + assert yVal.getPositionCount() == 1; + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + assert yDel.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + SpatialCentroidCartesianPointDocValuesAggregator.combineIntermediate(state, xVal.getDouble(0), xDel.getDouble(0), yVal.getDouble(0), yDel.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialCentroidCartesianPointDocValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + 
StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..3228340beeb43 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialCentroidCartesianPointDocValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialCentroidCartesianPointDocValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_centroid_cartesian_point_doc of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..de35965f52575 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java @@ -0,0 +1,205 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final CentroidPointAggregator.GroupingCentroidState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction(List channels, + CentroidPointAggregator.GroupingCentroidState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction(channels, SpatialCentroidCartesianPointDocValuesAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int 
positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = 
values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); + DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); + DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); + DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); + LongVector count = page.getBlock(channels.get(4)).asVector(); + assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidCartesianPointDocValuesAggregator.combineIntermediate(state, groupId, xVal.getDouble(groupPosition + positionOffset), xDel.getDouble(groupPosition + positionOffset), 
yVal.getDouble(groupPosition + positionOffset), yDel.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + CentroidPointAggregator.GroupingCentroidState inState = ((SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialCentroidCartesianPointDocValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialCentroidCartesianPointDocValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java new file mode 100644 index 0000000000000..bdc7c58a6c963 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java @@ -0,0 +1,157 @@ +// Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidCartesianPointSourceValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final CentroidPointAggregator.CentroidState state; + + private final List channels; + + public SpatialCentroidCartesianPointSourceValuesAggregatorFunction(DriverContext driverContext, + List channels, CentroidPointAggregator.CentroidState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialCentroidCartesianPointSourceValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialCentroidCartesianPointSourceValuesAggregatorFunction(driverContext, channels, SpatialCentroidCartesianPointSourceValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + 
if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + assert xVal.getPositionCount() == 1; + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + assert xDel.getPositionCount() == 1; + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + assert yVal.getPositionCount() == 1; + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + assert yDel.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + SpatialCentroidCartesianPointSourceValuesAggregator.combineIntermediate(state, xVal.getDouble(0), xDel.getDouble(0), yVal.getDouble(0), yDel.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = 
SpatialCentroidCartesianPointSourceValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..9b0d7c5f64cd7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier( + List channels) { + this.channels = channels; + } + + @Override + public SpatialCentroidCartesianPointSourceValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialCentroidCartesianPointSourceValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_centroid_cartesian_point_source of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..86b2f15187af6 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -0,0 +1,212 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final CentroidPointAggregator.GroupingCentroidState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction(List channels, + CentroidPointAggregator.GroupingCentroidState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction(channels, SpatialCentroidCartesianPointSourceValuesAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void 
add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < 
groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); + DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); + DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); + DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); + LongVector count = page.getBlock(channels.get(4)).asVector(); + assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId 
= Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidCartesianPointSourceValuesAggregator.combineIntermediate(state, groupId, xVal.getDouble(groupPosition + positionOffset), xDel.getDouble(groupPosition + positionOffset), yVal.getDouble(groupPosition + positionOffset), yDel.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + CentroidPointAggregator.GroupingCentroidState inState = ((SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialCentroidCartesianPointSourceValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialCentroidCartesianPointSourceValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java new file mode 100644 index 
0000000000000..fcd17d4c5cd86 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidGeoPointDocValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final CentroidPointAggregator.CentroidState state; + + private final List channels; + + public SpatialCentroidGeoPointDocValuesAggregatorFunction(DriverContext driverContext, + List channels, CentroidPointAggregator.CentroidState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialCentroidGeoPointDocValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialCentroidGeoPointDocValuesAggregatorFunction(driverContext, channels, SpatialCentroidGeoPointDocValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialCentroidGeoPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; 
i++) { + SpatialCentroidGeoPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + assert xVal.getPositionCount() == 1; + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + assert xDel.getPositionCount() == 1; + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + assert yVal.getPositionCount() == 1; + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + assert yDel.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + SpatialCentroidGeoPointDocValuesAggregator.combineIntermediate(state, xVal.getDouble(0), xDel.getDouble(0), yVal.getDouble(0), yDel.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialCentroidGeoPointDocValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + 
sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..46c2777e8c77a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialCentroidGeoPointDocValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialCentroidGeoPointDocValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_centroid_geo_point_doc of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..0ccff1a1463ac --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java @@ -0,0 +1,205 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final CentroidPointAggregator.GroupingCentroidState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction(List channels, + CentroidPointAggregator.GroupingCentroidState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction(channels, SpatialCentroidGeoPointDocValuesAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + 
addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + 
int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + SpatialCentroidGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); + DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); + DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); + DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); + LongVector count = page.getBlock(channels.get(4)).asVector(); + assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidGeoPointDocValuesAggregator.combineIntermediate(state, groupId, xVal.getDouble(groupPosition + positionOffset), xDel.getDouble(groupPosition + positionOffset), yVal.getDouble(groupPosition + positionOffset), yDel.getDouble(groupPosition + 
positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + CentroidPointAggregator.GroupingCentroidState inState = ((SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialCentroidGeoPointDocValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialCentroidGeoPointDocValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java new file mode 100644 index 0000000000000..be7b8d9758d1c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java @@ -0,0 +1,157 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidGeoPointSourceValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final CentroidPointAggregator.CentroidState state; + + private final List channels; + + public SpatialCentroidGeoPointSourceValuesAggregatorFunction(DriverContext driverContext, + List channels, CentroidPointAggregator.CentroidState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialCentroidGeoPointSourceValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialCentroidGeoPointSourceValuesAggregatorFunction(driverContext, channels, SpatialCentroidGeoPointSourceValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; 
+ } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + assert xVal.getPositionCount() == 1; + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + assert xDel.getPositionCount() == 1; + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + assert yVal.getPositionCount() == 1; + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + assert yDel.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + SpatialCentroidGeoPointSourceValuesAggregator.combineIntermediate(state, xVal.getDouble(0), xDel.getDouble(0), yVal.getDouble(0), yDel.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialCentroidGeoPointSourceValuesAggregator.evaluateFinal(state, 
driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..02b975f03890f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialCentroidGeoPointSourceValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialCentroidGeoPointSourceValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_centroid_geo_point_source of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..30ef738669914 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java @@ -0,0 +1,212 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("xVal", ElementType.DOUBLE), + new IntermediateStateDesc("xDel", ElementType.DOUBLE), + new IntermediateStateDesc("yVal", ElementType.DOUBLE), + new IntermediateStateDesc("yDel", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final CentroidPointAggregator.GroupingCentroidState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction(List channels, + CentroidPointAggregator.GroupingCentroidState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction(channels, SpatialCentroidGeoPointSourceValuesAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, 
IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = 
Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); + DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); + DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); + DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); + LongVector count = page.getBlock(channels.get(4)).asVector(); + assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = 
Math.toIntExact(groups.getInt(groupPosition)); + SpatialCentroidGeoPointSourceValuesAggregator.combineIntermediate(state, groupId, xVal.getDouble(groupPosition + positionOffset), xDel.getDouble(groupPosition + positionOffset), yVal.getDouble(groupPosition + positionOffset), yDel.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + CentroidPointAggregator.GroupingCentroidState inState = ((SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialCentroidGeoPointSourceValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialCentroidGeoPointSourceValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index 37c91dfd836a7..df6c883e952e5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -25,5 +25,6 @@ exports org.elasticsearch.compute.operator; exports 
org.elasticsearch.compute.operator.exchange; exports org.elasticsearch.compute.aggregation.blockhash; + exports org.elasticsearch.compute.aggregation.spatial; exports org.elasticsearch.compute.operator.topn; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index 28a7bd751acc4..dd760408b3be5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -34,7 +34,7 @@ */ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash,// - PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash { + NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash { protected final BigArrays bigArrays; protected final BlockFactory blockFactory; @@ -107,6 +107,7 @@ public static BlockHash build( */ private static BlockHash newForElementType(int channel, ElementType type, DriverContext driverContext) { return switch (type) { + case NULL -> new NullBlockHash(channel, driverContext); case BOOLEAN -> new BooleanBlockHash(channel, driverContext); case INT -> new IntBlockHash(channel, driverContext); case LONG -> new LongBlockHash(channel, driverContext); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index aa7c737e331c7..79da105a9adaa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -100,6 +100,7 @@ public IntVector nonEmpty() { } } + @Override public BitArray seenGroupIds(BigArrays bigArrays) { BitArray seen = new BitArray(everSeen.length, bigArrays); for (int i = 0; i < everSeen.length; i++) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java new file mode 100644 index 0000000000000..0c658ade236fd --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * Maps a {@link BooleanBlock} column to group ids. Assigns group + * {@code 0} to {@code false} and group {@code 1} to {@code true}. 
+ */ +final class NullBlockHash extends BlockHash { + private final int channel; + private boolean seenNull = false; + + NullBlockHash(int channel, DriverContext driverContext) { + super(driverContext); + this.channel = channel; + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + var block = page.getBlock(channel); + if (block.areAllValuesNull()) { + seenNull = true; + try (IntVector groupIds = blockFactory.newConstantIntVector(0, block.getPositionCount())) { + addInput.add(0, groupIds); + } + } else { + throw new IllegalArgumentException("can't use NullBlockHash for non-null blocks"); + } + } + + @Override + public Block[] getKeys() { + return new Block[] { blockFactory.newConstantNullBlock(seenNull ? 1 : 0) }; + } + + @Override + public IntVector nonEmpty() { + return blockFactory.newConstantIntVector(0, seenNull ? 1 : 0); + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + BitArray seen = new BitArray(1, bigArrays); + if (seenNull) { + seen.set(0); + } + return seen; + } + + @Override + public void close() { + // Nothing to close + } + + @Override + public String toString() { + return "NullBlockHash{channel=" + channel + ", seenNull=" + seenNull + '}'; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java new file mode 100644 index 0000000000000..1fc2430393c98 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java @@ -0,0 +1,281 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.aggregation.AggregatorState; +import org.elasticsearch.compute.aggregation.GroupingAggregatorState; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.search.aggregations.metrics.CompensatedSum; + +import java.nio.ByteOrder; + +/** + * This aggregator calculates the centroid of a set of geo points or cartesian_points. + * It is assumes that the points are encoded as longs. + * This requires that the planner has planned that points are loaded from the index as doc-values. 
+ */ +abstract class CentroidPointAggregator { + + public static void combine(CentroidState current, double xVal, double xDel, double yVal, double yDel, long count) { + current.add(xVal, xDel, yVal, yDel, count); + } + + public static void combineStates(CentroidState current, CentroidState state) { + current.add(state); + } + + public static void combineIntermediate(CentroidState state, double xIn, double dx, double yIn, double dy, long count) { + if (count > 0) { + combine(state, xIn, dx, yIn, dy, count); + } + } + + public static void evaluateIntermediate(CentroidState state, DriverContext driverContext, Block[] blocks, int offset) { + assert blocks.length >= offset + 5; + BlockFactory blockFactory = driverContext.blockFactory(); + blocks[offset + 0] = blockFactory.newConstantDoubleBlockWith(state.xSum.value(), 1); + blocks[offset + 1] = blockFactory.newConstantDoubleBlockWith(state.xSum.delta(), 1); + blocks[offset + 2] = blockFactory.newConstantDoubleBlockWith(state.ySum.value(), 1); + blocks[offset + 3] = blockFactory.newConstantDoubleBlockWith(state.ySum.delta(), 1); + blocks[offset + 4] = blockFactory.newConstantLongBlockWith(state.count, 1); + } + + public static Block evaluateFinal(CentroidState state, DriverContext driverContext) { + return driverContext.blockFactory().newConstantBytesRefBlockWith(state.encodeCentroidResult(), 1); + } + + public static void combineStates(GroupingCentroidState current, int groupId, GroupingCentroidState state, int statePosition) { + if (state.hasValue(statePosition)) { + current.add( + state.xValues.get(statePosition), + state.xDeltas.get(statePosition), + state.yValues.get(statePosition), + state.yDeltas.get(statePosition), + state.counts.get(statePosition), + groupId + ); + } + } + + public static void combineIntermediate( + GroupingCentroidState current, + int groupId, + double xValue, + double xDelta, + double yValue, + double yDelta, + long count + ) { + if (count > 0) { + current.add(xValue, xDelta, yValue, yDelta, 
count, groupId); + } + } + + public static void evaluateIntermediate( + GroupingCentroidState state, + Block[] blocks, + int offset, + IntVector selected, + DriverContext driverContext + ) { + assert blocks.length >= offset + 5; + try ( + var xValuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var xDeltaBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var yValuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var yDeltaBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var countsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + ) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int group = selected.getInt(i); + if (group < state.xValues.size()) { + xValuesBuilder.appendDouble(state.xValues.get(group)); + xDeltaBuilder.appendDouble(state.xDeltas.get(group)); + yValuesBuilder.appendDouble(state.yValues.get(group)); + yDeltaBuilder.appendDouble(state.yDeltas.get(group)); + countsBuilder.appendLong(state.counts.get(group)); + } else { + xValuesBuilder.appendDouble(0); + xDeltaBuilder.appendDouble(0); + yValuesBuilder.appendDouble(0); + yDeltaBuilder.appendDouble(0); + countsBuilder.appendLong(0); + } + } + blocks[offset + 0] = xValuesBuilder.build(); + blocks[offset + 1] = xDeltaBuilder.build(); + blocks[offset + 2] = yValuesBuilder.build(); + blocks[offset + 3] = yDeltaBuilder.build(); + blocks[offset + 4] = countsBuilder.build(); + } + } + + public static Block evaluateFinal(GroupingCentroidState state, IntVector selected, DriverContext driverContext) { + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int si = selected.getInt(i); + if (state.hasValue(si) && si < state.xValues.size()) { + BytesRef result = 
state.encodeCentroidResult(si); + builder.appendBytesRef(result); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static BytesRef encode(double x, double y) { + return new BytesRef(WellKnownBinary.toWKB(new Point(x, y), ByteOrder.LITTLE_ENDIAN)); + } + + static class CentroidState implements AggregatorState { + protected final CompensatedSum xSum = new CompensatedSum(0, 0); + protected final CompensatedSum ySum = new CompensatedSum(0, 0); + protected long count = 0; + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + CentroidPointAggregator.evaluateIntermediate(this, driverContext, blocks, offset); + } + + @Override + public void close() {} + + public void count(long count) { + this.count = count; + } + + public void add(CentroidState other) { + xSum.add(other.xSum.value(), other.xSum.delta()); + ySum.add(other.ySum.value(), other.ySum.delta()); + count += other.count; + } + + public void add(double x, double y) { + xSum.add(x); + ySum.add(y); + count++; + } + + public void add(double x, double dx, double y, double dy, long count) { + xSum.add(x, dx); + ySum.add(y, dy); + this.count += count; + } + + protected BytesRef encodeCentroidResult() { + double x = xSum.value() / count; + double y = ySum.value() / count; + return encode(x, y); + } + } + + static class GroupingCentroidState implements GroupingAggregatorState { + private final BigArrays bigArrays; + + DoubleArray xValues; + DoubleArray xDeltas; + DoubleArray yValues; + DoubleArray yDeltas; + + LongArray counts; + + GroupingCentroidState(BigArrays bigArrays) { + this.bigArrays = bigArrays; + boolean success = false; + try { + this.xValues = bigArrays.newDoubleArray(1); + this.xDeltas = bigArrays.newDoubleArray(1); + this.yValues = bigArrays.newDoubleArray(1); + this.yDeltas = bigArrays.newDoubleArray(1); + this.counts = bigArrays.newLongArray(1); + success = true; + } finally { + if (success == false) { + close(); + } 
+ } + } + + void add(double x, double dx, double y, double dy, long count, int groupId) { + ensureCapacity(groupId); + + // If the value is Inf or NaN, just add it to the running tally to "convert" to + // Inf/NaN. This keeps the behavior bwc from before kahan summing + if (Double.isFinite(x) == false || Double.isFinite(y) == false) { + xValues.increment(groupId, x); + yValues.increment(groupId, y); + return; + } + + addTo(xValues, xDeltas, groupId, x, dx); + addTo(yValues, yDeltas, groupId, y, dy); + counts.increment(groupId, count); + } + + private static void addTo(DoubleArray values, DoubleArray deltas, int groupId, double valueToAdd, double deltaToAdd) { + double value = values.get(groupId); + if (Double.isFinite(value) == false) { + // It isn't going to get any more infinite. + return; + } + double delta = deltas.get(groupId); + double correctedSum = valueToAdd + (delta + deltaToAdd); + double updatedValue = value + correctedSum; + deltas.set(groupId, correctedSum - (updatedValue - value)); + values.set(groupId, updatedValue); + } + + boolean hasValue(int index) { + return counts.get(index) > 0; + } + + /** Needed for generated code that does null tracking, which we do not need because we use count */ + final void enableGroupIdTracking(SeenGroupIds ignore) {} + + private void ensureCapacity(int groupId) { + if (groupId >= xValues.size()) { + xValues = bigArrays.grow(xValues, groupId + 1); + xDeltas = bigArrays.grow(xDeltas, groupId + 1); + yValues = bigArrays.grow(yValues, groupId + 1); + yDeltas = bigArrays.grow(yDeltas, groupId + 1); + counts = bigArrays.grow(counts, groupId + 1); + } + } + + protected BytesRef encodeCentroidResult(int si) { + long count = counts.get(si); + double x = xValues.get(si) / count; + double y = yValues.get(si) / count; + return encode(x, y); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + CentroidPointAggregator.evaluateIntermediate(this, blocks, 
offset, selected, driverContext); + } + + @Override + public void close() { + Releasables.close(xValues, xDeltas, yValues, yDeltas, counts); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java new file mode 100644 index 0000000000000..0bafb6f8112de --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.geo.XYEncodingUtils; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * This aggregator calculates the centroid of a set of cartesian points. + * It is assumes that the cartesian points are encoded as longs. + * This requires that the planner has planned that points are loaded from the index as doc-values. 
+ */ +@Aggregator( + { + @IntermediateState(name = "xVal", type = "DOUBLE"), + @IntermediateState(name = "xDel", type = "DOUBLE"), + @IntermediateState(name = "yVal", type = "DOUBLE"), + @IntermediateState(name = "yDel", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +class SpatialCentroidCartesianPointDocValuesAggregator extends CentroidPointAggregator { + + public static CentroidState initSingle() { + return new CentroidState(); + } + + public static GroupingCentroidState initGrouping(BigArrays bigArrays) { + return new GroupingCentroidState(bigArrays); + } + + public static void combine(CentroidState current, long v) { + current.add(decodeX(v), decodeY(v)); + } + + public static void combine(GroupingCentroidState current, int groupId, long encoded) { + current.add(decodeX(encoded), 0d, decodeY(encoded), 0d, 1, groupId); + } + + private static double decodeX(long encoded) { + return XYEncodingUtils.decode((int) (encoded >>> 32)); + } + + private static double decodeY(long encoded) { + return XYEncodingUtils.decode((int) (encoded & 0xFFFFFFFFL)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java new file mode 100644 index 0000000000000..5673892be4bf0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +/** + * This aggregator calculates the centroid of a set of cartesian points. + * It is assumes that the cartesian points are encoded as WKB BytesRef. + * This requires that the planner has NOT planned that points are loaded from the index as doc-values, but from source instead. + * This is also used for final aggregations and aggregations in the coordinator node, + * even if the local node partial aggregation is done with {@link SpatialCentroidCartesianPointSourceValuesAggregator}. + */ +@Aggregator( + { + @IntermediateState(name = "xVal", type = "DOUBLE"), + @IntermediateState(name = "xDel", type = "DOUBLE"), + @IntermediateState(name = "yVal", type = "DOUBLE"), + @IntermediateState(name = "yDel", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +class SpatialCentroidCartesianPointSourceValuesAggregator extends CentroidPointAggregator { + + public static CentroidState initSingle() { + return new CentroidState(); + } + + public static GroupingCentroidState initGrouping(BigArrays bigArrays) { + return new GroupingCentroidState(bigArrays); + } + + public static void combine(CentroidState current, BytesRef wkb) { + Point point = decode(wkb); + current.add(point.getX(), point.getY()); + } + + public static void combine(GroupingCentroidState current, int groupId, BytesRef wkb) { + Point point = decode(wkb); + current.add(point.getX(), 0d, point.getY(), 0d, 1, groupId); + } + + private static Point decode(BytesRef wkb) { + return (Point) 
WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java new file mode 100644 index 0000000000000..ee5ab0e292547 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.geo.GeoEncodingUtils; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * This aggregator calculates the centroid of a set of geo points. It is assumes that the geo points are encoded as longs. + * This requires that the planner has planned that points are loaded from the index as doc-values. 
+ */ +@Aggregator( + { + @IntermediateState(name = "xVal", type = "DOUBLE"), + @IntermediateState(name = "xDel", type = "DOUBLE"), + @IntermediateState(name = "yVal", type = "DOUBLE"), + @IntermediateState(name = "yDel", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +class SpatialCentroidGeoPointDocValuesAggregator extends CentroidPointAggregator { + + public static CentroidState initSingle() { + return new CentroidState(); + } + + public static GroupingCentroidState initGrouping(BigArrays bigArrays) { + return new GroupingCentroidState(bigArrays); + } + + public static void combine(CentroidState current, long v) { + current.add(decodeX(v), decodeY(v)); + } + + public static void combine(GroupingCentroidState current, int groupId, long encoded) { + current.add(decodeX(encoded), 0d, decodeY(encoded), 0d, 1, groupId); + } + + private static double decodeX(long encoded) { + return GeoEncodingUtils.decodeLongitude((int) (encoded & 0xFFFFFFFFL)); + } + + private static double decodeY(long encoded) { + return GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java new file mode 100644 index 0000000000000..caf55dcc2f4e1 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +/** + * This aggregator calculates the centroid of a set of geo points. + * It is assumes that the geo points are encoded as WKB BytesRef. + * This requires that the planner has NOT planned that points are loaded from the index as doc-values, but from source instead. + * This is also used for final aggregations and aggregations in the coordinator node, + * even if the local node partial aggregation is done with {@link SpatialCentroidGeoPointDocValuesAggregator}. + */ +@Aggregator( + { + @IntermediateState(name = "xVal", type = "DOUBLE"), + @IntermediateState(name = "xDel", type = "DOUBLE"), + @IntermediateState(name = "yVal", type = "DOUBLE"), + @IntermediateState(name = "yDel", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +class SpatialCentroidGeoPointSourceValuesAggregator extends CentroidPointAggregator { + + public static CentroidState initSingle() { + return new CentroidState(); + } + + public static GroupingCentroidState initGrouping(BigArrays bigArrays) { + return new GroupingCentroidState(bigArrays); + } + + public static void combine(CentroidState current, BytesRef wkb) { + Point point = decode(wkb); + current.add(point.getX(), point.getY()); + } + + public static void combine(GroupingCentroidState current, int groupId, BytesRef wkb) { + Point point = decode(wkb); + current.add(point.getX(), 0d, point.getY(), 0d, 1, groupId); + } + + private static Point decode(BytesRef wkb) { + return (Point) 
WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index c89a0ce260c67..5a6d7cb4a6003 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -107,8 +107,7 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R boolean mayHaveMultivaluedFields(); /** - * Creates a new block that only exposes the positions provided. Materialization of the selected positions is avoided. - * The new block may hold a reference to this block, increasing this block's reference count. + * Creates a new block that only exposes the positions provided. * @param positions the positions to retain * @return a filtered block * TODO: pass BlockFactory diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java index fb83432ba0565..10e9237ef7071 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java @@ -43,10 +43,6 @@ public SingletonOrdinalsBuilder appendOrd(int value) { return this; } - int[] ords() { - return ords; - } - @Override public SingletonOrdinalsBuilder beginPositionEntry() { throw new UnsupportedOperationException("should only have one value per doc"); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 
e24d355bf2c24..20395ff27b1b4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -83,7 +83,6 @@ $endif$ @Override public $Type$Block filter(int... positions) { - // TODO use reference counting to share the vector $if(BytesRef)$ final BytesRef scratch = new BytesRef(); $endif$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st index 71d6005a9fc17..d65c54b5e2b24 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st @@ -68,7 +68,6 @@ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Ty @Override public $Type$Block filter(int... positions) { - // TODO use reference counting to share the vector try (var builder = blockFactory().new$Type$BlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java deleted file mode 100644 index 95b3ee9c10ff0..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.lucene; - -import org.elasticsearch.common.logging.HeaderWarning; -import org.elasticsearch.index.mapper.BlockLoader; -import org.elasticsearch.index.mapper.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.search.lookup.SearchLookup; - -import java.util.Set; - -/** - * Resolves *how* ESQL loads field values. - */ -public final class BlockReaderFactories { - private BlockReaderFactories() {} - - /** - * Resolves *how* ESQL loads field values. - * @param ctx a search context for the index we're loading field from - * @param fieldName the name of the field to load - * @param asUnsupportedSource should the field be loaded as "unsupported"? - * These will always have {@code null} values - */ - public static BlockLoader loader(SearchExecutionContext ctx, String fieldName, boolean asUnsupportedSource) { - if (asUnsupportedSource) { - return BlockLoader.CONSTANT_NULLS; - } - MappedFieldType fieldType = ctx.getFieldType(fieldName); - if (fieldType == null) { - // the field does not exist in this context - return BlockLoader.CONSTANT_NULLS; - } - BlockLoader loader = fieldType.blockLoader(new MappedFieldType.BlockLoaderContext() { - @Override - public String indexName() { - return ctx.getFullyQualifiedIndex().getName(); - } - - @Override - public SearchLookup lookup() { - return ctx.lookup(); - } - - @Override - public Set sourcePaths(String name) { - return ctx.sourcePath(name); - } - - @Override - public String parentField(String field) { - return ctx.parentPath(field); - } - - @Override - public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { - return (FieldNamesFieldMapper.FieldNamesFieldType) ctx.lookup().fieldType(FieldNamesFieldMapper.NAME); - } - }); - if (loader == null) { - HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", fieldName); - 
return BlockLoader.CONSTANT_NULLS; - } - - return loader; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index 4ed32d6552497..4dda5c16295fb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -20,7 +20,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasables; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.io.UncheckedIOException; @@ -49,8 +48,8 @@ public static class Factory implements LuceneOperator.Factory { private final LuceneSliceQueue sliceQueue; public Factory( - List searchContexts, - Function queryFunction, + List contexts, + Function queryFunction, DataPartitioning dataPartitioning, int taskConcurrency, int limit @@ -58,7 +57,7 @@ public Factory( this.limit = limit; this.dataPartitioning = dataPartitioning; var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 21b2a4cfaeb0b..1eeedd06d058d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -26,7 +26,6 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -90,11 +89,7 @@ LuceneScorer getCurrentOrLoadNextScorer() { continue; } processedSlices++; - processedShards.add( - currentSlice.searchContext().getSearchExecutionContext().getFullyQualifiedIndex().getName() - + ":" - + currentSlice.searchContext().getSearchExecutionContext().getShardId() - ); + processedShards.add(currentSlice.shardContext().shardIdentifier()); } final PartialLeafReaderContext partialLeaf = currentSlice.getLeaf(sliceIndex++); logger.trace("Starting {}", partialLeaf); @@ -102,7 +97,7 @@ LuceneScorer getCurrentOrLoadNextScorer() { if (currentScorer == null || currentScorer.leafReaderContext() != leaf) { final Weight weight = currentSlice.weight().get(); processedQueries.add(weight.getQuery()); - currentScorer = new LuceneScorer(currentSlice.shardIndex(), currentSlice.searchContext(), weight, leaf); + currentScorer = new LuceneScorer(currentSlice.shardContext(), weight, leaf); } assert currentScorer.maxPosition <= partialLeaf.maxDoc() : currentScorer.maxPosition + ">" + partialLeaf.maxDoc(); currentScorer.maxPosition = partialLeaf.maxDoc(); @@ -118,8 +113,7 @@ LuceneScorer getCurrentOrLoadNextScorer() { * Wraps a {@link BulkScorer} with shard information */ static final class LuceneScorer { - private final int shardIndex; - private final SearchContext searchContext; + private final ShardContext shardContext; private final Weight weight; private final LeafReaderContext leafReaderContext; @@ -128,9 +122,8 @@ static final class LuceneScorer { private int maxPosition; private Thread executingThread; - LuceneScorer(int shardIndex, SearchContext searchContext, Weight 
weight, LeafReaderContext leafReaderContext) { - this.shardIndex = shardIndex; - this.searchContext = searchContext; + LuceneScorer(ShardContext shardContext, Weight weight, LeafReaderContext leafReaderContext) { + this.shardContext = shardContext; this.weight = weight; this.leafReaderContext = leafReaderContext; reinitialize(); @@ -165,12 +158,8 @@ void markAsDone() { position = DocIdSetIterator.NO_MORE_DOCS; } - int shardIndex() { - return shardIndex; - } - - SearchContext searchContext() { - return searchContext; + ShardContext shardContext() { + return shardContext; } Weight weight() { @@ -377,7 +366,7 @@ public String toString() { } } - static Function weightFunction(Function queryFunction, ScoreMode scoreMode) { + static Function weightFunction(Function queryFunction, ScoreMode scoreMode) { return ctx -> { final var query = queryFunction.apply(ctx); final var searcher = ctx.searcher(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java index c3fe03ae88bb3..716df6844e79f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.search.Weight; -import org.elasticsearch.search.internal.SearchContext; import java.util.List; import java.util.function.Supplier; @@ -16,8 +15,7 @@ /** * Holds a list of multiple partial Lucene segments */ -public record LuceneSlice(int shardIndex, SearchContext searchContext, List leaves, Supplier weight) { - +public record LuceneSlice(ShardContext shardContext, List leaves, Supplier weight) { int numLeaves() { return leaves.size(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index faf3d6437282a..d0329174f2839 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Weight; import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.internal.SearchContext; import java.util.ArrayList; import java.util.Arrays; @@ -48,24 +47,23 @@ public int totalSlices() { } public static LuceneSliceQueue create( - List searchContexts, - Function weightFunction, + List contexts, + Function weightFunction, DataPartitioning dataPartitioning, int taskConcurrency ) { final List slices = new ArrayList<>(); - for (int shardIndex = 0; shardIndex < searchContexts.size(); shardIndex++) { - final SearchContext searchContext = searchContexts.get(shardIndex); - final List leafContexts = searchContext.searcher().getLeafContexts(); + for (ShardContext ctx : contexts) { + final List leafContexts = ctx.searcher().getLeafContexts(); List> groups = switch (dataPartitioning) { case SHARD -> Collections.singletonList(leafContexts.stream().map(PartialLeafReaderContext::new).toList()); case SEGMENT -> segmentSlices(leafContexts); - case DOC -> docSlices(searchContext.searcher().getIndexReader(), taskConcurrency); + case DOC -> docSlices(ctx.searcher().getIndexReader(), taskConcurrency); }; final Weight[] cachedWeight = new Weight[1]; final Supplier weight = () -> { if (cachedWeight[0] == null) { - cachedWeight[0] = weightFunction.apply(searchContext); + cachedWeight[0] = weightFunction.apply(ctx); } return cachedWeight[0]; }; @@ -73,7 +71,7 @@ public static LuceneSliceQueue create( weight.get(); // eagerly build Weight once } for (List group : groups) { - slices.add(new LuceneSlice(shardIndex, searchContext, group, 
weight)); + slices.add(new LuceneSlice(ctx, group, weight)); } } return new LuceneSliceQueue(slices); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index b636e4aba8a5e..9d6e3f46d0e1e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -19,7 +19,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasables; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.io.UncheckedIOException; @@ -46,8 +45,8 @@ public static class Factory implements LuceneOperator.Factory { private final LuceneSliceQueue sliceQueue; public Factory( - List searchContexts, - Function queryFunction, + List contexts, + Function queryFunction, DataPartitioning dataPartitioning, int taskConcurrency, int maxPageSize, @@ -57,7 +56,7 @@ public Factory( this.limit = limit; this.dataPartitioning = dataPartitioning; var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @@ -149,7 +148,7 @@ public Page getOutput() { IntBlock leaf = null; IntVector docs = null; try { - shard = blockFactory.newConstantIntBlockWith(scorer.shardIndex(), currentPagePos); + shard = blockFactory.newConstantIntBlockWith(scorer.shardContext().index(), currentPagePos); leaf = 
blockFactory.newConstantIntBlockWith(scorer.leafReaderContext().ord, currentPagePos); docs = docsBuilder.build(); docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 7f08c8ca66821..8cb9173adc197 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -24,7 +24,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasables; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; @@ -49,8 +48,8 @@ public static final class Factory implements LuceneOperator.Factory { private final LuceneSliceQueue sliceQueue; public Factory( - List searchContexts, - Function queryFunction, + List contexts, + Function queryFunction, DataPartitioning dataPartitioning, int taskConcurrency, int maxPageSize, @@ -62,7 +61,7 @@ public Factory( this.limit = limit; this.dataPartitioning = dataPartitioning; var weightFunction = weightFunction(queryFunction, ScoreMode.TOP_DOCS); - this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @@ -156,9 +155,9 @@ private Page collect() { return emit(true); } try { - if (perShardCollector == null || perShardCollector.shardIndex != scorer.shardIndex()) { + if (perShardCollector == null || 
perShardCollector.shardContext.index() != scorer.shardContext().index()) { // TODO: share the bottom between shardCollectors - perShardCollector = new PerShardCollector(scorer.shardIndex(), scorer.searchContext(), sorts, limit); + perShardCollector = new PerShardCollector(scorer.shardContext(), sorts, limit); } var leafCollector = perShardCollector.getLeafCollector(scorer.leafReaderContext()); scorer.scoreNextRange(leafCollector, scorer.leafReaderContext().reader().getLiveDocs(), maxPageSize); @@ -170,7 +169,7 @@ private Page collect() { } if (scorer.isDone()) { var nextScorer = getCurrentOrLoadNextScorer(); - if (nextScorer == null || nextScorer.shardIndex() != scorer.shardIndex()) { + if (nextScorer == null || nextScorer.shardContext().index() != scorer.shardContext().index()) { return emit(true); } } @@ -205,7 +204,7 @@ private Page emit(boolean startEmitting) { ) { int start = offset; offset += size; - List leafContexts = perShardCollector.searchContext.searcher().getLeafContexts(); + List leafContexts = perShardCollector.shardContext.searcher().getLeafContexts(); for (int i = start; i < offset; i++) { int doc = scoreDocs[i].doc; int segment = ReaderUtil.subIndex(doc, leafContexts); @@ -213,7 +212,7 @@ private Page emit(boolean startEmitting) { currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment } - shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardIndex, size); + shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardContext.index(), size); segments = currentSegmentBuilder.build(); docs = currentDocsBuilder.build(); page = new Page(size, new DocVector(shard.asVector(), segments, docs, null).asBlock()); @@ -233,17 +232,15 @@ protected void describe(StringBuilder sb) { } static final class PerShardCollector { - private final int shardIndex; - private final SearchContext searchContext; + private final ShardContext shardContext; private final TopFieldCollector topFieldCollector; 
private int leafIndex; private LeafCollector leafCollector; private Thread currentThread; - PerShardCollector(int shardIndex, SearchContext searchContext, List> sorts, int limit) throws IOException { - this.shardIndex = shardIndex; - this.searchContext = searchContext; - Optional sortAndFormats = SortBuilder.buildSort(sorts, searchContext.getSearchExecutionContext()); + PerShardCollector(ShardContext shardContext, List> sorts, int limit) throws IOException { + this.shardContext = shardContext; + Optional sortAndFormats = shardContext.buildSort(sorts); if (sortAndFormats.isEmpty()) { throw new IllegalStateException("sorts must not be disabled in TopN"); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java new file mode 100644 index 0000000000000..5bf6ac8532f48 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene; + +import org.apache.lucene.search.IndexSearcher; +import org.elasticsearch.search.sort.SortAndFormats; +import org.elasticsearch.search.sort.SortBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Optional; + +/** + * Context of each shard we're operating against. + */ +public interface ShardContext { + /** + * The index of this shard in the list of shards being processed. + */ + int index(); + + /** + * Get {@link IndexSearcher} holding the actual data. + */ + IndexSearcher searcher(); + + /** + * Build a "sort" configuration from an Elasticsearch style builder. 
+ */ + Optional buildSort(List> sorts) throws IOException; + + /** + * A "name" for the shard that you can look up against other APIs like + * {@code _cat/shards}. + */ + String shardIdentifier(); +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java index 44b5e02760e5c..716f251851d24 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java @@ -49,7 +49,8 @@ public static Decoder decoder(ElementType elementType) { case DOUBLE -> new DoublesDecoder(); case BYTES_REF -> new BytesRefsDecoder(); case BOOLEAN -> new BooleansDecoder(); - default -> throw new IllegalArgumentException("can't encode " + elementType); + case NULL -> new NullsDecoder(); + default -> throw new IllegalArgumentException("can't decode " + elementType); }; } @@ -651,4 +652,17 @@ protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) { throw new IllegalStateException("all positions all nulls"); } } + + private static class NullsDecoder implements Decoder { + @Override + public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) { + for (int i = 0; i < count; i++) { + if (isNull.isNull(i)) { + builder.appendNull(); + } else { + throw new IllegalArgumentException("NullsDecoder requires that all positions are null"); + } + } + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java index 36aa8621062a5..a7caed1508091 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java @@ -35,6 +35,7 @@ public static Block dedupeToBlockAdaptive(Block block, BlockFactory blockFactory case INT -> new MultivalueDedupeInt((IntBlock) block).dedupeToBlockAdaptive(blockFactory); case LONG -> new MultivalueDedupeLong((LongBlock) block).dedupeToBlockAdaptive(blockFactory); case DOUBLE -> new MultivalueDedupeDouble((DoubleBlock) block).dedupeToBlockAdaptive(blockFactory); + case NULL -> block; default -> throw new IllegalArgumentException(); }; } @@ -52,6 +53,7 @@ public static Block dedupeToBlockUsingCopyMissing(Block block, BlockFactory bloc case INT -> new MultivalueDedupeInt((IntBlock) block).dedupeToBlockUsingCopyMissing(blockFactory); case LONG -> new MultivalueDedupeLong((LongBlock) block).dedupeToBlockUsingCopyMissing(blockFactory); case DOUBLE -> new MultivalueDedupeDouble((DoubleBlock) block).dedupeToBlockUsingCopyMissing(blockFactory); + case NULL -> block; default -> throw new IllegalArgumentException(); }; } @@ -71,6 +73,7 @@ public static Block dedupeToBlockUsingCopyAndSort(Block block, BlockFactory bloc case INT -> new MultivalueDedupeInt((IntBlock) block).dedupeToBlockUsingCopyAndSort(blockFactory); case LONG -> new MultivalueDedupeLong((LongBlock) block).dedupeToBlockUsingCopyAndSort(blockFactory); case DOUBLE -> new MultivalueDedupeDouble((DoubleBlock) block).dedupeToBlockUsingCopyAndSort(blockFactory); + case NULL -> block; default -> throw new IllegalArgumentException(); }; } @@ -118,6 +121,9 @@ public record HashResult(IntBlock ords, boolean sawNull) {} */ public static BatchEncoder batchEncoder(Block block, int batchSize, boolean allowDirectEncoder) { if (block.areAllValuesNull()) { + if (allowDirectEncoder == false) { + throw new IllegalArgumentException("null blocks can only be directly encoded"); + } return new BatchEncoder.DirectNulls(block); } var elementType = block.elementType(); diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java index 63dbdf2be09bf..fd6589bf5a913 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java @@ -39,7 +39,7 @@ public interface Operator extends Releasable { * non-trivial overhead and it's just not worth building even * smaller blocks without under normal circumstances. */ - int MIN_TARGET_PAGE_SIZE = 10; + int MIN_TARGET_PAGE_SIZE = 32; /** * whether the given operator can accept more input pages diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index 3173b716467be..361cea599637c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -169,7 +169,7 @@ private class OpenExchangeRequestHandler implements TransportRequestHandler drivers = new ArrayList<>(); try { - Set actualDocIds = Collections.newSetFromMap(ConcurrentCollections.newConcurrentMap()); + Set actualDocIds = ConcurrentCollections.newConcurrentSet(); for (int t = 0; t < factory.taskConcurrency(); t++) { PageConsumerOperator docCollector = new PageConsumerOperator(page -> { DocVector docVector = page.getBlock(0).asVector(); @@ -345,7 +345,7 @@ public static void assertDriverContext(DriverContext driverContext) { } static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit) { - final SearchContext searchContext = mockSearchContext(reader, 0); + final ShardContext searchContext = new 
LuceneSourceOperatorTests.MockShardContext(reader, 0); return new LuceneSourceOperator.Factory( List.of(searchContext), ctx -> query, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 5dfba49b404e8..2dc527ce213d6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -140,7 +140,7 @@ private void test(MockBlockFactory blockFactory) { randomBlocks[g] = BasicBlockTests.randomBlock( types.get(g), positionCount, - randomBoolean(), + types.get(g) == ElementType.NULL ? true : randomBoolean(), 1, maxValuesPerPosition, 0, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 0ccf2d3af04d9..4e392ca24dada 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -604,6 +604,20 @@ public void testBooleanHashWithMultiValuedFields() { } } + public void testNullHash() { + Object[] values = new Object[] { null, null, null, null }; + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:NULL], entries=1, size=")); + } else { + assertThat(ordsAndKeys.description, equalTo("NullBlockHash{channel=0, seenNull=true}")); + } + assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); + assertThat(ordsAndKeys.nonEmpty, 
equalTo(TestBlockFactory.getNonBreakingInstance().newConstantIntVector(0, 1))); + assertKeys(ordsAndKeys.keys, new Object[][] { new Object[] { null } }); + }, blockFactory.newConstantNullBlock(values.length)); + } + public void testLongLongHash() { long[] values1 = new long[] { 0, 1, 0, 1, 0, 1 }; long[] values2 = new long[] { 0, 0, 0, 1, 1, 1 }; @@ -1083,6 +1097,22 @@ public void testBytesRefLongHashHugeCombinatorialExplosion() { } } + public void testLongNull() { + long[] values = new long[] { 0, 1, 0, 2, 3, 1 }; + hash(ordsAndKeys -> { + Object[][] expectedKeys = { + new Object[] { 0L, null }, + new Object[] { 1L, null }, + new Object[] { 2L, null }, + new Object[] { 3L, null } }; + + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:NULL], entries=4, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 1); + assertKeys(ordsAndKeys.keys, expectedKeys); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, blockFactory.newLongArrayVector(values, values.length).asBlock(), blockFactory.newConstantNullBlock(values.length)); + } + record OrdsAndKeys(String description, int positionOffset, IntBlock ords, Block[] keys, IntVector nonEmpty) {} /** diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 7681b147824a5..36c7a3f178282 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -447,11 +447,11 @@ public void testBytesRefBlock() { } public void testBytesRefBlockOnGeoPoints() { - testBytesRefBlock(() -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()), false, GEO::wkbAsString); + testBytesRefBlock(() -> GEO.asWkb(GeometryTestUtils.randomPoint()), false, GEO::wkbToWkt); } public void 
testBytesRefBlockOnCartesianPoints() { - testBytesRefBlock(() -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()), false, CARTESIAN::wkbAsString); + testBytesRefBlock(() -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()), false, CARTESIAN::wkbToWkt); } public void testBytesRefBlockBuilderWithNulls() { @@ -899,6 +899,12 @@ public static RandomBlock randomBlock( boolean bytesRefFromPoints = randomBoolean(); Supplier pointSupplier = randomBoolean() ? GeometryTestUtils::randomPoint : ShapeTestUtils::randomPoint; for (int p = 0; p < positionCount; p++) { + if (elementType == ElementType.NULL) { + assert nullAllowed; + values.add(null); + builder.appendNull(); + continue; + } int valueCount = between(minValuesPerPosition, maxValuesPerPosition); if (valueCount == 0 || nullAllowed && randomBoolean()) { values.add(null); @@ -930,7 +936,7 @@ public static RandomBlock randomBlock( } case BYTES_REF -> { BytesRef b = bytesRefFromPoints - ? GEO.pointAsWKB(pointSupplier.get()) + ? GEO.asWkb(pointSupplier.get()) : new BytesRef(randomRealisticUnicodeOfLength(4)); valuesAtPosition.add(b); ((BytesRefBlock.Builder) builder).appendBytesRef(b); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java index 3c822da7b5586..a48e22e9ccefa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java @@ -46,11 +46,11 @@ public BlockBuilderTests(ElementType elementType) { } public void testAllNulls() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { + for (int numEntries : List.of(1, between(1, 100), between(101, 1000))) { testAllNullsImpl(elementType.newBlockBuilder(0, blockFactory), numEntries); - testAllNullsImpl(elementType.newBlockBuilder(100, blockFactory), numEntries); - 
testAllNullsImpl(elementType.newBlockBuilder(1000, blockFactory), numEntries); - testAllNullsImpl(elementType.newBlockBuilder(randomIntBetween(0, 100), blockFactory), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(numEntries, blockFactory), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(numEntries * 10, blockFactory), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(between(0, numEntries), blockFactory), numEntries); } } @@ -60,17 +60,14 @@ private void testAllNullsImpl(Block.Builder builder, int numEntries) { } try (Block block = builder.build()) { assertThat(block.getPositionCount(), is(numEntries)); - assertThat(block.isNull(0), is(true)); - assertThat(block.isNull(numEntries - 1), is(true)); - assertThat(block.isNull(randomPosition(numEntries)), is(true)); + for (int p = 0; p < numEntries; p++) { + assertThat(block.isNull(p), is(true)); + } + assertThat(block.areAllValuesNull(), is(true)); } assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } - static int randomPosition(int positionCount) { - return positionCount == 1 ? 
0 : randomIntBetween(0, positionCount - 1); - } - public void testCloseWithoutBuilding() { elementType.newBlockBuilder(10, blockFactory).close(); assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index df1662e1dfb6d..2f9cf6ec57775 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -12,11 +12,13 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.operator.ComputeTestCase; import org.elasticsearch.core.Releasables; +import org.elasticsearch.test.BreakerTestUtil; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.function.Function; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -62,34 +64,31 @@ public void testNonDecreasingDescendingDocs() { docs.close(); } - private static int MAX_BUILD_BREAKS_LIMIT = 1391; - public void testBuildBreaks() { - testBuildBreaks(ByteSizeValue.ofBytes(between(0, MAX_BUILD_BREAKS_LIMIT))); - } - - public void testBuildBreaksMax() { - testBuildBreaks(ByteSizeValue.ofBytes(MAX_BUILD_BREAKS_LIMIT)); - } - - private void testBuildBreaks(ByteSizeValue limit) { - int size = 100; - BlockFactory blockFactory = blockFactory(limit); - Exception e = expectThrows(CircuitBreakingException.class, () -> { - try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, size)) { - for (int r = 0; r < size; r++) { - builder.appendShard(3 - size % 4); - builder.appendSegment(size % 10); - builder.appendDoc(size); - } - builder.build().close(); - } + var maxBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), 
limit -> { + BlockFactory blockFactory = blockFactory(limit); + buildDocBlock(blockFactory).close(); }); + var limit = ByteSizeValue.ofBytes(randomLongBetween(0, maxBreakLimit.getBytes())); + BlockFactory blockFactory = blockFactory(limit); + Exception e = expectThrows(CircuitBreakingException.class, () -> buildDocBlock(blockFactory).close()); assertThat(e.getMessage(), equalTo("over test limit")); logger.info("break position", e); assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } + private DocBlock buildDocBlock(BlockFactory blockFactory) { + int size = 100; + try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, size)) { + for (int r = 0; r < size; r++) { + builder.appendShard(3 - r % 4); + builder.appendSegment(r % 10); + builder.appendDoc(size); + } + return builder.build(); + } + } + public void testShardSegmentDocMap() { assertShardSegmentDocMap( new int[][] { @@ -171,25 +170,31 @@ private void assertShardSegmentDocMap(int[][] data, int[][] expected) { assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } - // TODO these are really difficult to maintain. can we figure these out of the fly? 
- private static final int MAX_SHARD_SEGMENT_DOC_MAP_BREAKS = 2220; - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104191") public void testShardSegmentDocMapBreaks() { - testShardSegmentDocMapBreaks(ByteSizeValue.ofBytes(between(MAX_BUILD_BREAKS_LIMIT + 1, MAX_SHARD_SEGMENT_DOC_MAP_BREAKS))); - } - - public void testShardSegmentDocMapBreaksMax() { - testShardSegmentDocMapBreaks(ByteSizeValue.ofBytes(MAX_SHARD_SEGMENT_DOC_MAP_BREAKS)); + ByteSizeValue buildBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory blockFactory = blockFactory(limit); + buildDocBlock(blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + }); + ByteSizeValue docMapBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory blockFactory = blockFactory(limit); + try (DocBlock docBlock = buildDocBlock(blockFactory)) { + docBlock.asVector().shardSegmentDocMapForwards(); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + }); + var limit = ByteSizeValue.ofBytes(randomLongBetween(buildBreakLimit.getBytes() + 1, docMapBreakLimit.getBytes())); + BlockFactory blockFactory = blockFactory(limit); + testShardSegmentDocMapBreaks(blockFactory); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } - private void testShardSegmentDocMapBreaks(ByteSizeValue limit) { + private void testShardSegmentDocMapBreaks(BlockFactory blockFactory) { int size = 100; - BlockFactory blockFactory = blockFactory(limit); try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, size)) { for (int r = 0; r < size; r++) { - builder.appendShard(3 - size % 4); - builder.appendSegment(size % 10); + builder.appendShard(3 - r % 4); + builder.appendSegment(r % 10); builder.appendDoc(size); } try (DocBlock docBlock = builder.build()) { @@ -255,15 +260,36 @@ public void testFilter() { } public void testFilterBreaks() { - BlockFactory factory = 
blockFactory(ByteSizeValue.ofBytes(between(250, 370))); - try ( - DocVector docs = new DocVector( - factory.newConstantIntVector(0, 10), - factory.newConstantIntVector(0, 10), - factory.newIntArrayVector(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 10), - false - ) - ) { + Function buildDocVector = factory -> { + IntVector shards = null; + IntVector segments = null; + IntVector docs = null; + DocVector result = null; + try { + shards = factory.newConstantIntVector(0, 10); + segments = factory.newConstantIntVector(0, 10); + docs = factory.newConstantIntVector(0, 10); + result = new DocVector(shards, segments, docs, false); + return result; + } finally { + if (result == null) { + Releasables.close(shards, segments, docs); + } + } + }; + ByteSizeValue buildBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory factory = blockFactory(limit); + buildDocVector.apply(factory).close(); + }); + ByteSizeValue filterBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory factory = blockFactory(limit); + try (DocVector docs = buildDocVector.apply(factory)) { + docs.filter(1, 2, 3).close(); + } + }); + ByteSizeValue limit = ByteSizeValue.ofBytes(randomLongBetween(buildBreakLimit.getBytes() + 1, filterBreakLimit.getBytes())); + BlockFactory factory = blockFactory(limit); + try (DocVector docs = buildDocVector.apply(factory)) { Exception e = expectThrows(CircuitBreakingException.class, () -> docs.filter(1, 2, 3)); assertThat(e.getMessage(), equalTo("over test limit")); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index 8d401c2099b85..0c41cfc704f56 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.search.internal.SearchContext; import org.junit.After; import java.io.IOException; @@ -38,7 +37,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.mockito.Mockito.when; public class LuceneCountOperatorTests extends AnyOperatorTestCase { private Directory directory = newDirectory(); @@ -82,8 +80,7 @@ private LuceneCountOperator.Factory simple(DataPartitioning dataPartitioning, in throw new RuntimeException(e); } - SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader, 0); - when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); + ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0); final Query query; if (enableShortcut && randomBoolean()) { query = new MatchAllDocsQuery(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index 19e16144e11c5..a4c6622344bea 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -27,26 +27,19 @@ import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.index.Index; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; -import 
org.elasticsearch.index.fielddata.FieldDataContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.SortAndFormats; +import org.elasticsearch.search.sort.SortBuilder; import org.junit.After; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.function.Function; import static org.hamcrest.Matchers.both; @@ -55,10 +48,6 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class LuceneSourceOperatorTests extends AnyOperatorTestCase { private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG); @@ -97,24 +86,8 @@ private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, i throw new RuntimeException(e); } - SearchContext ctx = mockSearchContext(reader, 0); - when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> { - String name = inv.getArgument(0); - return switch (name) { - case "s" -> S_FIELD; - default -> throw new 
IllegalArgumentException("don't support [" + name + "]"); - }; - }); - when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> { - MappedFieldType ft = inv.getArgument(0); - IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); - // This breaker is for fielddata from text fields. We don't test it so it won't break not test not to use a breaker here. - return builder.build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); - }); - when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); - when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); - Function queryFunction = c -> new MatchAllDocsQuery(); + ShardContext ctx = new MockShardContext(reader, 0); + Function queryFunction = c -> new MatchAllDocsQuery(); int maxPageSize = between(10, Math.max(10, numDocs)); return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit); } @@ -206,24 +179,43 @@ private void testSimple(DriverContext ctx, int size, int limit) { * Creates a mock search context with the given index reader. * The returned mock search context can be used to test with {@link LuceneOperator}. 
*/ - public static SearchContext mockSearchContext(IndexReader reader, int shardId) { - try { - ContextIndexSearcher searcher = new ContextIndexSearcher( - reader, - IndexSearcher.getDefaultSimilarity(), - IndexSearcher.getDefaultQueryCache(), - TrivialQueryCachingPolicy.NEVER, - true - ); - SearchContext searchContext = mock(SearchContext.class); - when(searchContext.searcher()).thenReturn(searcher); - SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); - when(searchContext.getSearchExecutionContext()).thenReturn(searchExecutionContext); - when(searchExecutionContext.getFullyQualifiedIndex()).thenReturn(new Index("test", "uid")); - when(searchExecutionContext.getShardId()).thenReturn(shardId); - return searchContext; - } catch (IOException e) { - throw new UncheckedIOException(e); + public static class MockShardContext implements ShardContext { + private final int index; + private final ContextIndexSearcher searcher; + + public MockShardContext(IndexReader reader, int index) { + this.index = index; + try { + this.searcher = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + TrivialQueryCachingPolicy.NEVER, + true + ); + } catch (IOException e) { + throw new AssertionError(e); + } + } + + @Override + public int index() { + return index; + } + + @Override + public IndexSearcher searcher() { + return searcher; + } + + @Override + public Optional buildSort(List> sorts) { + return Optional.empty(); + } + + @Override + public String shardIdentifier() { + return "test"; } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index 5776c45274ad1..57f3dd5412ca1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -13,6 +13,10 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSelector; +import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.breaker.CircuitBreakingException; @@ -26,30 +30,23 @@ import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.index.fielddata.FieldDataContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; import org.junit.After; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static 
org.mockito.Mockito.when; public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG); @@ -88,24 +85,14 @@ private LuceneTopNSourceOperator.Factory simple(DataPartitioning dataPartitionin throw new RuntimeException(e); } - SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader, 0); - when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> { - String name = inv.getArgument(0); - return switch (name) { - case "s" -> S_FIELD; - default -> throw new IllegalArgumentException("don't support [" + name + "]"); - }; - }); - when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> { - MappedFieldType ft = inv.getArgument(0); - IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); - // This breaker is used for fielddata but we're not testing that. - return builder.build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); - }); - when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); - when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); - Function queryFunction = c -> new MatchAllDocsQuery(); + ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0) { + @Override + public Optional buildSort(List> sorts) { + SortField field = new SortedNumericSortField("s", SortField.Type.LONG, false, SortedNumericSelector.Type.MIN); + return Optional.of(new SortAndFormats(new Sort(field), new DocValueFormat[] { null })); + } + }; + Function queryFunction = c -> new MatchAllDocsQuery(); int taskConcurrency = 0; int maxPageSize = between(10, Math.max(10, size)); List> sorts = List.of(new FieldSortBuilder("s")); diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index ada0582a2fad8..1ba9fa5d1d354 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -65,7 +65,6 @@ import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.mapper.TsidExtractingIdFieldMapper; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -86,7 +85,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.elasticsearch.compute.lucene.LuceneSourceOperatorTests.mockSearchContext; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; @@ -164,7 +162,7 @@ private SourceOperator simpleInput(DriverContext context, int size, int commitEv throw new RuntimeException(e); } var luceneFactory = new LuceneSourceOperator.Factory( - List.of(mockSearchContext(reader, 0)), + List.of(new LuceneSourceOperatorTests.MockShardContext(reader, 0)), ctx -> new MatchAllDocsQuery(), DataPartitioning.SHARD, randomIntBetween(1, 10), @@ -496,6 +494,11 @@ public String indexName() { return "test_index"; } + @Override + public MappedFieldType.FieldExtractPreference fieldExtractPreference() { + return MappedFieldType.FieldExtractPreference.NONE; + } + @Override public SearchLookup lookup() { throw new UnsupportedOperationException(); @@ -1268,7 +1271,7 @@ public void testWithNulls() throws 
IOException { DriverContext driverContext = driverContext(); var luceneFactory = new LuceneSourceOperator.Factory( - List.of(mockSearchContext(reader, 0)), + List.of(new LuceneSourceOperatorTests.MockShardContext(reader, 0)), ctx -> new MatchAllDocsQuery(), randomFrom(DataPartitioning.values()), randomIntBetween(1, 10), @@ -1483,10 +1486,10 @@ public void testManyShards() throws IOException { closeMe[d * 2 + 1] = dirs[d] = newDirectory(); closeMe[d * 2] = readers[d] = initIndex(dirs[d], size, between(10, size * 2)); } - List contexts = new ArrayList<>(); + List contexts = new ArrayList<>(); List readerShardContexts = new ArrayList<>(); for (int s = 0; s < shardCount; s++) { - contexts.add(mockSearchContext(readers[s], s)); + contexts.add(new LuceneSourceOperatorTests.MockShardContext(readers[s], s)); readerShardContexts.add(new ValuesSourceReaderOperator.ShardContext(readers[s], () -> SourceLoader.FROM_STORED_SOURCE)); } var luceneFactory = new LuceneSourceOperator.Factory( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index 01f51b32edb1d..4d5a6260ed02d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -79,18 +79,25 @@ public static Page mergePages(List pages) { * Make a deep copy of some pages. Useful so that when the originals are * released the copies are still live. 
*/ - public static List deepCopyOf(List pages) { + public static List deepCopyOf(BlockFactory blockFactory, List pages) { List out = new ArrayList<>(pages.size()); - BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); - for (Page p : pages) { - Block[] blocks = new Block[p.getBlockCount()]; - for (int b = 0; b < blocks.length; b++) { - Block orig = p.getBlock(b); - Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory); - builder.copyFrom(orig, 0, p.getPositionCount()); - blocks[b] = builder.build(); + try { + for (Page p : pages) { + Block[] blocks = new Block[p.getBlockCount()]; + for (int b = 0; b < blocks.length; b++) { + Block orig = p.getBlock(b); + try (Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory)) { + builder.copyFrom(orig, 0, p.getPositionCount()); + blocks[b] = builder.build(); + } + } + out.add(new Page(blocks)); + } + } finally { + if (pages.size() != out.size()) { + // failed to copy all the pages, we're bubbling out an exception. So we have to close the copy. 
+ Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(out.iterator(), p -> p::releaseBlocks))); } - out.add(new Page(blocks)); } return out; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index e366646ecd0f5..d2db9c7b48da6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -49,12 +48,6 @@ protected void assertSimpleOutput(List input, List results) { assertThat(outputPositionCount, equalTo(Math.min(100, inputPositionCount))); } - @Override - protected ByteSizeValue enoughMemoryForSimple() { - assumeFalse("doesn't allocate, just filters", true); - return null; - } - public void testStatus() { BlockFactory blockFactory = driverContext().blockFactory(); LimitOperator op = simple().get(driverContext()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java index b67076d635993..87577612068ab 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java @@ -56,7 +56,7 @@ public class MultivalueDedupeTests extends ESTestCase { public static List supportedTypes() { List supported = new ArrayList<>(); for (ElementType elementType : 
ElementType.values()) { - if (oneOf(elementType, ElementType.UNKNOWN, ElementType.NULL, ElementType.DOC)) { + if (oneOf(elementType, ElementType.UNKNOWN, ElementType.DOC)) { continue; } supported.add(elementType); @@ -77,9 +77,6 @@ private static boolean oneOf(ElementType elementType, ElementType... others) { public static List params() { List params = new ArrayList<>(); for (ElementType elementType : supportedTypes()) { - if (oneOf(elementType, ElementType.UNKNOWN, ElementType.NULL, ElementType.DOC)) { - continue; - } for (boolean nullAllowed : new boolean[] { false, true }) { for (int max : new int[] { 10, 100, 1000 }) { params.add(new Object[] { elementType, 1000, nullAllowed, 1, max, 0, 0 }); @@ -138,7 +135,7 @@ private BasicBlockTests.RandomBlock randomBlock() { return BasicBlockTests.randomBlock( elementType, positionCount, - nullAllowed, + elementType == ElementType.NULL ? true : nullAllowed, minValuesPerPosition, maxValuesPerPosition, minDupsPerPosition, @@ -164,8 +161,8 @@ private void assertDeduped(BlockFactory blockFactory, BasicBlockTests.RandomBloc } public void testHash() { + assumeFalse("not hash for null", elementType == ElementType.NULL); BasicBlockTests.RandomBlock b = randomBlock(); - switch (b.block().elementType()) { case BOOLEAN -> assertBooleanHash(Set.of(), b); case BYTES_REF -> assertBytesRefHash(Set.of(), b); @@ -177,8 +174,8 @@ public void testHash() { } public void testHashWithPreviousValues() { + assumeFalse("not hash for null", elementType == ElementType.NULL); BasicBlockTests.RandomBlock b = randomBlock(); - switch (b.block().elementType()) { case BOOLEAN -> { Set previousValues = switch (between(0, 2)) { @@ -227,6 +224,7 @@ public void testHashWithPreviousValues() { } public void testBatchEncodeAll() { + assumeFalse("null only direct encodes", elementType == ElementType.NULL); int initCapacity = Math.toIntExact(ByteSizeValue.ofKb(10).getBytes()); BasicBlockTests.RandomBlock b = randomBlock(); var encoder = (BatchEncoder.MVEncoder) 
MultivalueDedupe.batchEncoder(b.block(), initCapacity, false); @@ -245,6 +243,7 @@ public void testBatchEncodeAll() { public void testBatchEncoderStartSmall() { assumeFalse("Booleans don't grow in the same way", elementType == ElementType.BOOLEAN); + assumeFalse("Nulls don't grow", elementType == ElementType.NULL); BasicBlockTests.RandomBlock b = randomBlock(); var encoder = (BatchEncoder.MVEncoder) MultivalueDedupe.batchEncoder(b.block(), 0, false); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 165e5b80b9a58..02517e8fafe1a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; @@ -199,12 +198,6 @@ protected void assertSimpleOutput(List input, List results) { assertThat(resultIter2.hasNext(), equalTo(false)); } - @Override - protected ByteSizeValue enoughMemoryForSimple() { - assumeFalse("doesn't throw in tests but probably should", true); - return ByteSizeValue.ofBytes(1); - } - public void testNoopStatus() { BlockFactory blockFactory = blockFactory(); MvExpandOperator op = new MvExpandOperator(0, randomIntBetween(1, 1000)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 0890ba669f0a2..68a2bde0c2f6c 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -41,6 +41,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.in; /** * Base tests for {@link Operator}s that are not {@link SourceOperator} or {@link SinkOperator}. @@ -97,16 +98,10 @@ public final void testSimpleCircuitBreaking() { DriverContext inputFactoryContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); try { - ByteSizeValue limit = BreakerTestUtil.findBreakerLimit( - memoryLimitForSimple, - l -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), l) - ); + ByteSizeValue limit = BreakerTestUtil.findBreakerLimit(memoryLimitForSimple, l -> runWithLimit(simple, input, l)); ByteSizeValue testWithSize = ByteSizeValue.ofBytes(randomLongBetween(0, limit.getBytes())); logger.info("testing with {} against a limit of {}", testWithSize, limit); - Exception e = expectThrows( - CircuitBreakingException.class, - () -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), testWithSize) - ); + Exception e = expectThrows(CircuitBreakingException.class, () -> runWithLimit(simple, input, testWithSize)); assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); } finally { Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks))); @@ -119,15 +114,16 @@ private void runWithLimit(Operator.OperatorFactory factory, List input, By CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); DriverContext driverContext = new DriverContext(bigArrays, blockFactory); + List localInput = CannedSourceOperator.deepCopyOf(blockFactory, 
input); boolean driverStarted = false; try { var operator = factory.get(driverContext); driverStarted = true; - drive(operator, input.iterator(), driverContext); + drive(operator, localInput.iterator(), driverContext); } finally { if (driverStarted == false) { // if drive hasn't even started then we need to release the input pages manually - Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks))); + Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(localInput.iterator(), p -> p::releaseBlocks))); } assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index 572657c7c8226..26b9b16d7b24e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; @@ -95,12 +94,6 @@ protected void assertSimpleOutput(List input, List results) { assertThat(total, equalTo(input.stream().mapToInt(Page::getPositionCount).sum())); } - @Override - protected ByteSizeValue enoughMemoryForSimple() { - assumeTrue("doesn't allocate", false); - return null; - } - public void testDescriptionOfMany() { ProjectOperator.ProjectOperatorFactory factory = new ProjectOperator.ProjectOperatorFactory( IntStream.range(0, 100).boxed().toList() diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 1b1801c63017d..21db023a249f1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.compute.operator.DriverRunner; import org.elasticsearch.compute.operator.SinkOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.ReleasableRef; import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancellationService; @@ -407,9 +408,9 @@ public void sendResponse(TransportResponse transportResponse) throws IOException } } } - origResp.decRef(); - ExchangeResponse newResp = new ExchangeResponse(page, origResp.finished()); - super.sendResponse(newResp); + try (var newRespRef = ReleasableRef.of(new ExchangeResponse(page, origResp.finished()))) { + super.sendResponse(newRespRef.get()); + } } }; handler.messageReceived(request, filterChannel, task); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index ba4f547d80ce1..10fecd122672a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -946,6 +946,7 @@ public void testRandomMultiValuesTopN() { Set uniqueOrders = new LinkedHashSet<>(sortingByColumns); List>> expectedValues = new ArrayList<>(rows); List blocks = new ArrayList<>(blocksCount); 
+ boolean[] validSortKeys = new boolean[blocksCount]; List elementTypes = new ArrayList<>(blocksCount); List encoders = new ArrayList<>(blocksCount); @@ -959,6 +960,7 @@ public void testRandomMultiValuesTopN() { () -> randomFrom(ElementType.values()) ); elementTypes.add(e); + validSortKeys[type] = true; try (Block.Builder builder = e.newBlockBuilder(rows, driverContext().blockFactory())) { List previousValue = null; Function randomValueSupplier = (blockType) -> randomValue(blockType); @@ -966,23 +968,22 @@ public void testRandomMultiValuesTopN() { if (rarely()) { randomValueSupplier = switch (randomInt(2)) { case 0 -> { - // use the right BytesRef encoder (don't touch the bytes) + // Simulate ips encoders.add(TopNEncoder.IP); - // deal with IP fields (BytesRef block) like ES does and properly encode the ip addresses yield (blockType) -> new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))); } case 1 -> { - // use the right BytesRef encoder (don't touch the bytes) + // Simulate version fields encoders.add(TopNEncoder.VERSION); - // create a valid Version yield (blockType) -> randomVersion().toBytesRef(); } - default -> { - // use the right BytesRef encoder (don't touch the bytes) + case 2 -> { + // Simulate geo_shape and geo_point encoders.add(DEFAULT_UNSORTABLE); - // create a valid geo_point + validSortKeys[type] = false; yield (blockType) -> randomPointAsWKB(); } + default -> throw new UnsupportedOperationException(); }; } else { encoders.add(UTF8); @@ -1032,10 +1033,16 @@ public void testRandomMultiValuesTopN() { } } - // simulate the LogicalPlanOptimizer.PruneRedundantSortClauses by eliminating duplicate sorting columns (same column, same asc/desc, - // same "nulls" handling) - while (uniqueOrders.size() < sortingByColumns) { - int column = randomIntBetween(0, blocksCount - 1); + /* + * Build sort keys, making sure not to include duplicates. 
This could + * build fewer than the desired sort columns, but it's more important + * to make sure that we don't include dups + * (to simulate LogicalPlanOptimizer.PruneRedundantSortClauses) and + * not to include sort keys that simulate geo objects. Those aren't + * sortable at all. + */ + for (int i = 0; i < sortingByColumns; i++) { + int column = randomValueOtherThanMany(c -> false == validSortKeys[c], () -> randomIntBetween(0, blocksCount - 1)); uniqueOrders.add(new TopNOperator.SortOrder(column, randomBoolean(), randomBoolean())); } diff --git a/x-pack/plugin/esql/qa/build.gradle b/x-pack/plugin/esql/qa/build.gradle index 0b7d210bcd99e..d3109f6f9b307 100644 --- a/x-pack/plugin/esql/qa/build.gradle +++ b/x-pack/plugin/esql/qa/build.gradle @@ -1,5 +1,5 @@ description = 'Integration tests for ESQL' subprojects { - tasks.withType(Javadoc).all { enabled = false } + tasks.withType(Javadoc).configureEach { enabled = false } } diff --git a/x-pack/plugin/esql/qa/security/build.gradle b/x-pack/plugin/esql/qa/security/build.gradle index 33371320b865d..068a4fd8f4989 100644 --- a/x-pack/plugin/esql/qa/security/build.gradle +++ b/x-pack/plugin/esql/qa/security/build.gradle @@ -1,4 +1,6 @@ apply plugin: 'elasticsearch.internal-java-rest-test' +// Necessary to use tests in Serverless +apply plugin: 'elasticsearch.internal-test-artifact' tasks.named('javaRestTest') { usesDefaultDistribution() diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index e363fa64c594d..bb8163915c1c4 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -38,11 +38,10 @@ public class EsqlSecurityIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster 
cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .nodes(2) .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "true") .rolesFile(Resource.fromClasspath("roles.yml")) - .user("test-admin", "x-pack-test-password", "test-admin", false) + .user("test-admin", "x-pack-test-password", "test-admin", true) .user("user1", "x-pack-test-password", "user1", false) .user("user2", "x-pack-test-password", "user2", false) .user("user3", "x-pack-test-password", "user3", false) diff --git a/x-pack/plugin/esql/qa/server/heap-attack/build.gradle b/x-pack/plugin/esql/qa/server/heap-attack/build.gradle deleted file mode 100644 index 93eae7d3b9d18..0000000000000 --- a/x-pack/plugin/esql/qa/server/heap-attack/build.gradle +++ /dev/null @@ -1,11 +0,0 @@ -apply plugin: 'elasticsearch.internal-java-rest-test' -// Necessary to use tests in Serverless -apply plugin: 'elasticsearch.internal-test-artifact' - -dependencies { - javaRestTestImplementation project(xpackModule('esql:qa:server')) -} - -tasks.named('javaRestTest') { - usesDefaultDistribution() -} diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index fac888dff37fa..9ca7bd2aaf020 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -39,6 +39,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.ENRICH_SOURCE_INDICES; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; import static 
org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; @@ -87,7 +88,6 @@ public MultiClusterSpecIT(String fileName, String groupName, String testName, In @Override protected void shouldSkipTest(String testName) { super.shouldSkipTest(testName); - assumeFalse("CCQ doesn't support enrich yet", hasEnrich(testCase.query)); assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); assumeTrue("Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, Clusters.oldVersion())); } @@ -120,7 +120,7 @@ static RestClient twoClients(RestClient localClient, RestClient remoteClient) th String endpoint = request.getEndpoint(); if (endpoint.startsWith("/_query")) { return localClient.performRequest(request); - } else if (endpoint.contains("_bulk")) { + } else if (endpoint.endsWith("/_bulk") && ENRICH_SOURCE_INDICES.stream().noneMatch(i -> endpoint.equals("/" + i + "/_bulk"))) { return bulkClient.performRequest(request); } else { Request[] clones = cloneRequests(request, 2); @@ -192,17 +192,6 @@ static CsvSpecReader.CsvTestCase convertToRemoteIndices(CsvSpecReader.CsvTestCas return testCase; } - static boolean hasEnrich(String query) { - String[] commands = query.split("\\|"); - for (int i = 0; i < commands.length; i++) { - commands[i] = commands[i].trim(); - if (commands[i].toLowerCase(Locale.ROOT).startsWith("enrich")) { - return true; - } - } - return false; - } - static boolean hasIndexMetadata(String query) { String[] commands = query.split("\\|"); if (commands[0].trim().toLowerCase(Locale.ROOT).startsWith("from")) { diff --git a/x-pack/plugin/esql/qa/server/multi-node/build.gradle b/x-pack/plugin/esql/qa/server/multi-node/build.gradle index e7ef204d77dbb..2f26003cf7ce4 100644 --- a/x-pack/plugin/esql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-node/build.gradle @@ -1,11 +1,27 @@ apply plugin: 'elasticsearch.internal-java-rest-test' 
+apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) javaRestTestImplementation project(xpackModule('esql:qa:server')) + yamlRestTestImplementation project(xpackModule('esql:qa:server')) } tasks.named('javaRestTest') { usesDefaultDistribution() } + +restResources { + restApi { + include '_common', 'bulk', 'get', 'indices', 'esql', 'xpack', 'enrich', 'cluster' + } + restTests { + includeXpack 'esql' + } +} + +tasks.named('yamlRestTest') { + usesDefaultDistribution() + maxParallelForks = 1 +} diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlClientYamlIT.java new file mode 100644 index 0000000000000..5a615def1186f --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlClientYamlIT.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.mixed; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; + +public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .nodes(2) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return createParameters(); + } + + @Before + @After + public void assertRequestBreakerEmpty() throws Exception { + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java new file mode 100644 index 0000000000000..70afdf32d3808 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; + +abstract class AbstractEsqlClientYamlIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected final String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + protected AbstractEsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @Before + @After + private void assertRequestBreakerEmpty() throws Exception { + /* + * This hook is shared by all subclasses. If it is public it we'll + * get complaints that it is inherited. It isn't. Whatever. Making + * it private works - the hook still runs. It just looks strange. 
+ */ + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java new file mode 100644 index 0000000000000..657f396b2857f --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.section.ApiCallSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +/** + * Run the ESQL yaml tests against the async esql endpoint with a 30 minute {@code wait_until_completion_timeout}. + * That's far longer than any should take and far longer than any sensible person will wait, but it's simple + * and it makes sure all the yaml tests work when within the timeout. 
+ */ +public class EsqlClientYamlAsyncIT extends AbstractEsqlClientYamlIT { + public EsqlClientYamlAsyncIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return parameters(doSection -> { + ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi("esql.async_query"); + for (Map body : copy.getBodies()) { + body.put("wait_for_completion_timeout", "30m"); + } + doSection.setApiCallSection(copy); + return doSection; + }); + } + + public static Iterable parameters(Function modify) throws Exception { + List result = new ArrayList<>(); + for (Object[] orig : ESClientYamlSuiteTestCase.createParameters()) { + assert orig.length == 1; + ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0]; + try { + ClientYamlTestSection modified = new ClientYamlTestSection( + candidate.getTestSection().getLocation(), + candidate.getTestSection().getName(), + candidate.getTestSection().getPrerequisiteSection(), + candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() + ); + result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); + } + } + return result; + } + + private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { + if (false == (e instanceof DoSection)) { + return e; + } + DoSection doSection = (DoSection) e; + String api = doSection.getApiCallSection().getApi(); + return switch (api) { + case "esql.query" -> modify.apply(doSection); + case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( + "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." 
+ ); + default -> e; + }; + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java new file mode 100644 index 0000000000000..0f2bf2703f62f --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; +import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; +import org.elasticsearch.xcontent.XContentLocation; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Run the ESQL yaml tests async and then fetch the results with a long wait time. 
+ */ +public class EsqlClientYamlAsyncSubmitAndFetchIT extends AbstractEsqlClientYamlIT { + public EsqlClientYamlAsyncSubmitAndFetchIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return EsqlClientYamlAsyncIT.parameters(DoEsqlAsync::new); + } + + private static class DoEsqlAsync implements ExecutableSection { + private final DoSection original; + + private DoEsqlAsync(DoSection original) { + this.original = original; + } + + @Override + public XContentLocation getLocation() { + return original.getLocation(); + } + + @Override + public void execute(ClientYamlTestExecutionContext executionContext) throws IOException { + try { + // Start the query + List> bodies = original.getApiCallSection().getBodies().stream().map(m -> { + Map body = new HashMap<>(m); + if (randomBoolean()) { + /* + * Try to force the request to go async by setting the timeout to 0. + * This doesn't *actually* force the request async - if it finishes + * super duper faster it won't get async. But that's life. + */ + body.put("wait_for_completion_timeout", "0ms"); + } + return body; + }).toList(); + ClientYamlTestResponse startResponse = executionContext.callApi( + "esql.async_query", + original.getApiCallSection().getParams(), + bodies, + original.getApiCallSection().getHeaders(), + original.getApiCallSection().getNodeSelector() + ); + + String id = (String) startResponse.evaluate("id"); + boolean finishedEarly = id == null; + if (finishedEarly) { + /* + * If we finished early, make sure we don't have a "catch" + * param and expect and error. And make sure we match the + * warnings folks have asked for. + */ + original.failIfHasCatch(startResponse); + original.checkWarningHeaders(startResponse.getWarningHeaders(), testPath(executionContext)); + return; + } + + /* + * Ok, we didn't finish before the timeout. Fine, let's fetch the result. 
+ */ + Map params = new HashMap<>(); + params.put("wait_for_completion_timeout", "30m"); + params.put("id", id); + String dropNullColumns = original.getApiCallSection().getParams().get("drop_null_columns"); + if (dropNullColumns != null) { + params.put("drop_null_columns", dropNullColumns); + } + ClientYamlTestResponse fetchResponse = executionContext.callApi( + "esql.async_query_get", + params, + List.of(), + original.getApiCallSection().getHeaders(), + original.getApiCallSection().getNodeSelector() + ); + original.failIfHasCatch(fetchResponse); + original.checkWarningHeaders(fetchResponse.getWarningHeaders(), testPath(executionContext)); + } catch (ClientYamlTestResponseException e) { + original.checkResponseException(e, executionContext); + } + } + + private String testPath(ClientYamlTestExecutionContext executionContext) { + return executionContext.getClientYamlTestCandidate() != null + ? executionContext.getClientYamlTestCandidate().getTestPath() + : null; + } + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java index 5af469c018345..e67ca751298be 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java @@ -9,29 +9,12 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; 
-import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; - -public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { - - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.security.enabled", "false") - .setting("xpack.license.self_generated.type", "trial") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } +/** + * Run the ESQL yaml tests against the synchronous API. + */ +public class EsqlClientYamlIT extends AbstractEsqlClientYamlIT { public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { super(testCandidate); } @@ -40,10 +23,4 @@ public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { public static Iterable parameters() throws Exception { return createParameters(); } - - @Before - @After - public void assertRequestBreakerEmpty() throws Exception { - EsqlSpecTestCase.assertRequestBreakerEmpty(); - } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index fd686ec48bb79..18b9206f9b89e 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -13,10 +13,15 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import 
org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.esql.CsvTestUtils; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.RequestObjectBuilder; import org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; import org.elasticsearch.xpack.ql.SpecReader; @@ -28,8 +33,13 @@ import java.net.URL; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.Map; +import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; +import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; +import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude; +import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.CsvAssert.assertData; @@ -161,7 +171,38 @@ protected void assertResults( Logger logger ) { assertMetadata(expected, actualColumns, logger); - assertData(expected, actualValues, testCase.ignoreOrder, logger, value -> value == null ? 
"null" : value.toString()); + assertData(expected, actualValues, testCase.ignoreOrder, logger, EsqlSpecTestCase::valueMapper); + } + + private static Object valueMapper(CsvTestUtils.Type type, Object value) { + if (value == null) { + return "null"; + } + if (type == CsvTestUtils.Type.GEO_POINT || type == CsvTestUtils.Type.CARTESIAN_POINT) { + // Point tests are failing in clustered integration tests because of tiny precision differences at very small scales + if (value instanceof String wkt) { + try { + Geometry geometry = WellKnownText.fromWKT(GeometryValidator.NOOP, false, wkt); + if (geometry instanceof Point point) { + return normalizedPoint(type, point.getX(), point.getY()); + } + } catch (Throwable ignored) {} + } + } + return value.toString(); + } + + private static String normalizedPoint(CsvTestUtils.Type type, double x, double y) { + if (type == CsvTestUtils.Type.GEO_POINT) { + return normalizedGeoPoint(x, y); + } + return String.format(Locale.ROOT, "POINT (%f %f)", (float) x, (float) y); + } + + private static String normalizedGeoPoint(double x, double y) { + x = decodeLongitude(encodeLongitude(x)); + y = decodeLatitude(encodeLatitude(y)); + return String.format(Locale.ROOT, "POINT (%f %f)", x, y); } private Throwable reworkException(Throwable th) { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index c884b123f0f99..02e3c88cb7576 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -148,7 +148,7 @@ public void testNonExistentEnrichPolicy() throws IOException { ); assertThat( EntityUtils.toString(re.getResponse().getEntity()), - containsString("unresolved enrich policy [countris], did you mean [countries]?") + 
containsString("enrich policy [countris] doesn't exist, did you mean [countries]?") ); } @@ -161,7 +161,7 @@ public void testNonExistentEnrichPolicy_KeepField() throws IOException { ); assertThat( EntityUtils.toString(re.getResponse().getEntity()), - containsString("unresolved enrich policy [countris], did you mean [countries]?") + containsString("enrich policy [countris] doesn't exist, did you mean [countries]?") ); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 86ec01b7f5266..100895feade16 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -233,7 +233,7 @@ public void testColumnarMode() throws IOException { bulkLoadTestData(docCount); boolean columnar = randomBoolean(); - var query = builder().query(fromIndex() + " | keep keyword, integer"); + var query = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc"); if (columnar || randomBoolean()) { query.columnar(columnar); } @@ -263,28 +263,27 @@ public void testColumnarMode() throws IOException { public void testTextMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("txt", count, null), runEsqlAsTextWithFormat(builder, "txt", null)); } public void testCSVMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep 
keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("csv", count, '|'), runEsqlAsTextWithFormat(builder, "csv", '|')); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104195") public void testTSVMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("tsv", count, null), runEsqlAsTextWithFormat(builder, "tsv", null)); } public void testCSVNoHeaderMode() throws IOException { bulkLoadTestData(1); - var builder = builder().query(fromIndex() + " | keep keyword, integer | limit 100"); + var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); Request request = prepareRequest(SYNC); String mediaType = attachBody(builder.build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); diff --git a/x-pack/plugin/esql/qa/testFixtures/build.gradle b/x-pack/plugin/esql/qa/testFixtures/build.gradle index d313d615a1a42..cf1057452344c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/build.gradle +++ b/x-pack/plugin/esql/qa/testFixtures/build.gradle @@ -23,7 +23,7 @@ dependencies { * If no arguments are specified, the default URL is http://localhost:9200 without authentication. * It also supports HTTPS. 
*/ -task loadCsvSpecData(type: JavaExec) { +tasks.register("loadCsvSpecData", JavaExec) { group = "Execution" description = "Loads ESQL CSV Spec Tests data on a running stand-alone instance" classpath = sourceSets.main.runtimeClasspath diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 49dc585c01753..38bd05d57d768 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -20,6 +20,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.function.BiFunction; import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -155,7 +156,7 @@ private static void assertMetadata( } static void assertData(ExpectedResults expected, ActualResults actual, boolean ignoreOrder, Logger logger) { - assertData(expected, actual.values(), ignoreOrder, logger, v -> v); + assertData(expected, actual.values(), ignoreOrder, logger, (t, v) -> v); } public static void assertData( @@ -163,7 +164,7 @@ public static void assertData( Iterator> actualValuesIterator, boolean ignoreOrder, Logger logger, - Function valueTransformer + BiFunction valueTransformer ) { assertData(expected, EsqlTestUtils.getValuesList(actualValuesIterator), ignoreOrder, logger, valueTransformer); } @@ -173,7 +174,7 @@ public static void assertData( List> actualValues, boolean ignoreOrder, Logger logger, - Function valueTransformer + BiFunction valueTransformer ) { if (ignoreOrder) { expected.values().sort(resultRowComparator(expected.columnTypes())); @@ -195,16 +196,20 @@ public static void assertData( for (int column = 0; column < expectedRow.size(); column++) { var expectedValue = expectedRow.get(column); var actualValue = 
actualRow.get(column); + var expectedType = expected.columnTypes().get(column); if (expectedValue != null) { - var expectedType = expected.columnTypes().get(column); // convert the long from CSV back to its STRING form if (expectedType == Type.DATETIME) { expectedValue = rebuildExpected(expectedValue, Long.class, x -> UTC_DATE_TIME_FORMATTER.formatMillis((long) x)); } else if (expectedType == Type.GEO_POINT) { - expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbAsString((BytesRef) x)); + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbToWkt((BytesRef) x)); } else if (expectedType == Type.CARTESIAN_POINT) { - expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbAsString((BytesRef) x)); + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbToWkt((BytesRef) x)); + } else if (expectedType == Type.GEO_SHAPE) { + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbToWkt((BytesRef) x)); + } else if (expectedType == Type.CARTESIAN_SHAPE) { + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbToWkt((BytesRef) x)); } else if (expectedType == Type.IP) { // convert BytesRef-packed IP to String, allowing subsequent comparison with what's expected expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> DocValueFormat.IP.format((BytesRef) x)); @@ -217,8 +222,8 @@ public static void assertData( } assertEquals( "Row[" + row + "] Column[" + column + "]", - valueTransformer.apply(expectedValue), - valueTransformer.apply(actualValue) + valueTransformer.apply(expectedType, expectedValue), + valueTransformer.apply(expectedType, actualValue) ); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 919ef66456230..4e0f0b8661631 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -61,6 +61,9 @@ public final class CsvTestUtils { private static final int MAX_WIDTH = 20; private static final CsvPreference CSV_SPEC_PREFERENCES = new CsvPreference.Builder('"', '|', "\r\n").build(); private static final String NULL_VALUE = "null"; + private static final char ESCAPE_CHAR = '\\'; + public static final String COMMA_ESCAPING_REGEX = "(?> loadPageFromCsv(URL source) throws Excep record CsvColumn(String name, Type type, BuilderWrapper builderWrapper) implements Releasable { void append(String stringValue) { - if (stringValue.contains(",")) {// multi-value field + if (stringValue.startsWith("\"") && stringValue.endsWith("\"")) { // string value + stringValue = stringValue.substring(1, stringValue.length() - 1).replace(ESCAPED_COMMA_SEQUENCE, ","); + } else if (stringValue.contains(",")) {// multi-value field builderWrapper().builder().beginPositionEntry(); String[] arrayOfValues = delimitedListToStringArray(stringValue, ","); @@ -229,6 +234,8 @@ public void close() { * Takes a csv String and converts it to a String array. Also, it recognizes an opening bracket "[" in one string and a closing "]" * in another string and it creates a single concatenated comma-separated String of all the values between the opening bracket entry * and the closing bracket entry. In other words, entries enclosed by "[]" are returned as a single element. + * + * Commas can be escaped with \ (backslash) character. 
*/ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) { var mvCompressedEntries = new ArrayList(); @@ -237,14 +244,20 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) int pos = 0; // current position in the csv String int commaPos; // current "," character position + int previousCommaPos = 0; while ((commaPos = csvLine.indexOf(",", pos)) != -1 || pos <= csvLine.length()) { + if (commaPos > 0 && csvLine.charAt(commaPos - 1) == ESCAPE_CHAR) {// skip the escaped comma + pos = commaPos + 1;// moving on to the next character after comma + continue; + } + boolean isLastElement = commaPos == -1; - String entry = csvLine.substring(pos, isLastElement ? csvLine.length() : commaPos).trim(); + String entry = csvLine.substring(previousCommaPos, isLastElement ? csvLine.length() : commaPos).trim(); if (entry.startsWith("[")) { if (previousMvValue != null || (isLastElement && entry.endsWith("]") == false)) { String message = "Error line [{}:{}]: Unexpected start of a multi-value field value; current token [{}], " + (isLastElement ? 
"no closing point" : "previous token [{}]"); - throw new IllegalArgumentException(format(message, lineNumber, pos, entry, previousMvValue)); + throw new IllegalArgumentException(format(message, lineNumber, previousCommaPos, entry, previousMvValue)); } if (entry.endsWith("]")) { if (entry.length() > 2) {// single-valued multivalue field :shrug: @@ -263,7 +276,7 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) format( "Error line [{}:{}]: Unexpected end of a multi-value field value (no previous starting point); found [{}]", lineNumber, - pos, + previousCommaPos, entry ) ); @@ -279,8 +292,8 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) format( "Error line [{}:{}]: Unexpected missing value in a multi-value column; found [{}]", lineNumber, - pos, - csvLine.substring(pos - 1) + previousCommaPos, + csvLine.substring(previousCommaPos - 1) ) ); } @@ -290,12 +303,22 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) } } pos = 1 + (isLastElement ? csvLine.length() : commaPos);// break out of the loop if it reached its last element + previousCommaPos = pos; } return mvCompressedEntries.toArray(String[]::new); } public record ExpectedResults(List columnNames, List columnTypes, List> values) {} + /** + * The method loads a section of a .csv-spec file representing the results of executing the query of that section. + * It reads both the schema (field names and their types) and the row values. + * Values starting with an opening square bracket and ending with a closing square bracket are considered multi-values. Inside + * these multi-values, commas separate the individual values and escaped commas are allowed with a prefixed \ + * default \ (backslash) character. 
+ * @param csv a string representing the header and row values of a single query execution result + * @return data structure with column names, their types and values + */ public static ExpectedResults loadCsvSpecValues(String csv) { List columnNames; List columnTypes; @@ -338,13 +361,21 @@ public static ExpectedResults loadCsvSpecValues(String csv) { if (value.startsWith("[") ^ value.endsWith("]")) { throw new IllegalArgumentException("Incomplete multi-value (opening and closing square brackets) found " + value); } - if (value.contains(",") && value.startsWith("[")) {// commas outside a multi-value should be ok - List listOfMvValues = new ArrayList<>(); - for (String mvValue : delimitedListToStringArray(value.substring(1, value.length() - 1), ",")) { - listOfMvValues.add(columnTypes.get(i).convert(mvValue.trim())); + if (value.contains(",") && value.startsWith("[")) { + // split on commas but ignoring escaped commas + String[] multiValues = value.substring(1, value.length() - 1).split(COMMA_ESCAPING_REGEX); + if (multiValues.length > 0) { + List listOfMvValues = new ArrayList<>(); + for (String mvValue : multiValues) { + listOfMvValues.add(columnTypes.get(i).convert(mvValue.trim().replace(ESCAPED_COMMA_SEQUENCE, ","))); + } + rowValues.add(listOfMvValues); + } else { + rowValues.add(columnTypes.get(i).convert(value.replace(ESCAPED_COMMA_SEQUENCE, ","))); } - rowValues.add(listOfMvValues); } else { + // The value considered here is the one where any potential escaped comma is kept as is (with the escape char) + // TODO if we'd want escaped commas outside multi-values fields, we'd have to adjust this value here as well rowValues.add(columnTypes.get(i).convert(value)); } } @@ -391,8 +422,10 @@ public enum Type { Long.class ), BOOLEAN(Booleans::parseBoolean, Boolean.class), - GEO_POINT(x -> x == null ? null : GEO.stringAsWKB(x), BytesRef.class), - CARTESIAN_POINT(x -> x == null ? null : CARTESIAN.stringAsWKB(x), BytesRef.class); + GEO_POINT(x -> x == null ? 
null : GEO.wktToWkb(x), BytesRef.class), + CARTESIAN_POINT(x -> x == null ? null : CARTESIAN.wktToWkb(x), BytesRef.class), + GEO_SHAPE(x -> x == null ? null : GEO.wktToWkb(x), BytesRef.class), + CARTESIAN_SHAPE(x -> x == null ? null : CARTESIAN.wktToWkb(x), BytesRef.class); private static final Map LOOKUP = new HashMap<>(); @@ -457,7 +490,7 @@ public static Type asType(ElementType elementType, Type actualType) { } private static Type bytesRefBlockType(Type actualType) { - if (actualType == GEO_POINT || actualType == CARTESIAN_POINT) { + if (actualType == GEO_POINT || actualType == CARTESIAN_POINT || actualType == GEO_SHAPE || actualType == CARTESIAN_SHAPE) { return actualType; } else { return KEYWORD; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 3df70b3b83d37..224f3d34980e6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -42,8 +42,9 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.common.Strings.delimitedListToStringArray; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.CsvTestUtils.COMMA_ESCAPING_REGEX; +import static org.elasticsearch.xpack.esql.CsvTestUtils.ESCAPED_COMMA_SEQUENCE; import static org.elasticsearch.xpack.esql.CsvTestUtils.multiValuesAwareCsvToStringArray; public class CsvTestsDataLoader { @@ -56,31 +57,35 @@ public class CsvTestsDataLoader { private static final TestsDataset CLIENT_IPS = new TestsDataset("clientips", "mapping-clientips.json", "clientips.csv"); private static final TestsDataset AIRPORTS = new TestsDataset("airports", "mapping-airports.json", "airports.csv"); private static final 
TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", "mapping-airports_web.json", "airports_web.csv"); + private static final TestsDataset COUNTRIES_BBOX = new TestsDataset( + "countries_bbox", + "mapping-countries_bbox.json", + "countries_bbox.csv" + ); + private static final TestsDataset COUNTRIES_BBOX_WEB = new TestsDataset( + "countries_bbox_web", + "mapping-countries_bbox_web.json", + "countries_bbox_web.csv" + ); - public static final Map CSV_DATASET_MAP = Map.of( - EMPLOYEES.indexName, - EMPLOYEES, - HOSTS.indexName, - HOSTS, - APPS.indexName, - APPS, - LANGUAGES.indexName, - LANGUAGES, - UL_LOGS.indexName, - UL_LOGS, - SAMPLE_DATA.indexName, - SAMPLE_DATA, - CLIENT_IPS.indexName, - CLIENT_IPS, - AIRPORTS.indexName, - AIRPORTS, - AIRPORTS_WEB.indexName, - AIRPORTS_WEB + public static final Map CSV_DATASET_MAP = Map.ofEntries( + Map.entry(EMPLOYEES.indexName, EMPLOYEES), + Map.entry(HOSTS.indexName, HOSTS), + Map.entry(APPS.indexName, APPS), + Map.entry(LANGUAGES.indexName, LANGUAGES), + Map.entry(UL_LOGS.indexName, UL_LOGS), + Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), + Map.entry(CLIENT_IPS.indexName, CLIENT_IPS), + Map.entry(AIRPORTS.indexName, AIRPORTS), + Map.entry(AIRPORTS_WEB.indexName, AIRPORTS_WEB), + Map.entry(COUNTRIES_BBOX.indexName, COUNTRIES_BBOX), + Map.entry(COUNTRIES_BBOX_WEB.indexName, COUNTRIES_BBOX_WEB) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); private static final EnrichConfig CLIENT_IPS_ENRICH = new EnrichConfig("clientip_policy", "enrich-policy-clientips.json"); + public static final List ENRICH_SOURCE_INDICES = List.of("languages", "clientips"); public static final List ENRICH_POLICIES = List.of(LANGUAGES_ENRICH, CLIENT_IPS_ENRICH); /** @@ -137,17 +142,33 @@ public static void main(String[] args) throws IOException { } try (RestClient client = builder.build()) { - loadDataSetIntoEs(client); + loadDataSetIntoEs(client, (restClient, indexName, 
indexMapping) -> { + Request request = new Request("PUT", "/" + indexName); + request.setJsonEntity("{\"mappings\":" + indexMapping + "}"); + restClient.performRequest(request); + }); } } + private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { + loadDataSetIntoEs(client, LogManager.getLogger(CsvTestsDataLoader.class), indexCreator); + } + public static void loadDataSetIntoEs(RestClient client) throws IOException { - loadDataSetIntoEs(client, LogManager.getLogger(CsvTestsDataLoader.class)); + loadDataSetIntoEs(client, (restClient, indexName, indexMapping) -> { + ESRestTestCase.createIndex(restClient, indexName, null, indexMapping, null); + }); } public static void loadDataSetIntoEs(RestClient client, Logger logger) throws IOException { + loadDataSetIntoEs(client, logger, (restClient, indexName, indexMapping) -> { + ESRestTestCase.createIndex(restClient, indexName, null, indexMapping, null); + }); + } + + private static void loadDataSetIntoEs(RestClient client, Logger logger, IndexCreator indexCreator) throws IOException { for (var dataSet : CSV_DATASET_MAP.values()) { - load(client, dataSet.indexName, "/" + dataSet.mappingFileName, "/" + dataSet.dataFileName, logger); + load(client, dataSet.indexName, "/" + dataSet.mappingFileName, "/" + dataSet.dataFileName, logger, indexCreator); } forceMerge(client, CSV_DATASET_MAP.keySet(), logger); for (var policy : ENRICH_POLICIES) { @@ -169,7 +190,14 @@ private static void loadEnrichPolicy(RestClient client, String policyName, Strin client.performRequest(request); } - private static void load(RestClient client, String indexName, String mappingName, String dataName, Logger logger) throws IOException { + private static void load( + RestClient client, + String indexName, + String mappingName, + String dataName, + Logger logger, + IndexCreator indexCreator + ) throws IOException { URL mapping = CsvTestsDataLoader.class.getResource(mappingName); if (mapping == null) { throw new 
IllegalArgumentException("Cannot find resource " + mappingName); @@ -178,14 +206,10 @@ private static void load(RestClient client, String indexName, String mappingName if (data == null) { throw new IllegalArgumentException("Cannot find resource " + dataName); } - createTestIndex(client, indexName, readTextFile(mapping)); + indexCreator.createIndex(client, indexName, readTextFile(mapping)); loadCsvData(client, indexName, data, CsvTestsDataLoader::createParser, logger); } - private static void createTestIndex(RestClient client, String indexName, String mapping) throws IOException { - ESRestTestCase.createIndex(client, indexName, null, mapping, null); - } - public static String readTextFile(URL resource) throws IOException { try (BufferedReader reader = TestUtils.reader(resource)) { StringBuilder b = new StringBuilder(); @@ -198,6 +222,20 @@ public static String readTextFile(URL resource) throws IOException { } @SuppressWarnings("unchecked") + /** + * Loads a classic csv file in an ES cluster using a RestClient. 
+ * The structure of the file is as follows: + * - commented lines should start with "//" + * - the first non-comment line from the file is the schema line (comma separated field_name:ES_data_type elements) + * - sub-fields should be placed after the root field using a dot notation for the name: + * root_field:long,root_field.sub_field:integer + * - a special _id field can be used in the schema and the values of this field will be used in the bulk request as actual doc ids + * - all subsequent non-comment lines represent the values that will be used to build the _bulk request + * - an empty string "" refers to a null value + * - a value starting with an opening square bracket "[" and ending with a closing square bracket "]" refers to a multi-value field + * - multi-values are comma separated + * - commas inside multivalue fields can be escaped with \ (backslash) character + */ private static void loadCsvData( RestClient client, String indexName, @@ -205,7 +243,8 @@ private static void loadCsvData( CheckedBiFunction p, Logger logger ) throws IOException { - Request request = new Request("POST", "/_bulk"); + // The indexName is optional for a bulk request, but we use it for routing in MultiClusterSpecIT. 
+ Request request = new Request("POST", "/" + indexName + "/_bulk"); StringBuilder builder = new StringBuilder(); try (BufferedReader reader = org.elasticsearch.xpack.ql.TestUtils.reader(resource)) { String line; @@ -278,17 +317,27 @@ private static void loadCsvData( if (i > 0 && row.length() > 0) { row.append(","); } - if (entries[i].contains(",")) {// multi-value + // split on comma ignoring escaped commas + String[] multiValues = entries[i].split(COMMA_ESCAPING_REGEX); + if (multiValues.length > 0) {// multi-value StringBuilder rowStringValue = new StringBuilder("["); - for (String s : delimitedListToStringArray(entries[i], ",")) { - rowStringValue.append("\"" + s + "\","); + for (String s : multiValues) { + if (entries[i].startsWith("\"") == false || entries[i].endsWith("\"") == false) { + rowStringValue.append("\"" + s + "\","); + } else { + rowStringValue.append(s + ","); + } } // remove the last comma and put a closing bracket instead rowStringValue.replace(rowStringValue.length() - 1, rowStringValue.length(), "]"); entries[i] = rowStringValue.toString(); } else { - entries[i] = "\"" + entries[i] + "\""; + if (entries[i].startsWith("\"") == false || entries[i].endsWith("\"") == false) { + entries[i] = "\"" + entries[i] + "\""; + } } + // replace any escaped commas with single comma + entries[i] = entries[i].replace(ESCAPED_COMMA_SEQUENCE, ","); row.append("\"" + columns[i] + "\":" + entries[i]); } catch (Exception e) { throw new IllegalArgumentException( @@ -356,4 +405,8 @@ private static XContentParser createParser(XContent xContent, InputStream data) public record TestsDataset(String indexName, String mappingFileName, String dataFileName) {} public record EnrichConfig(String policyName, String policyFileName) {} + + private interface IndexCreator { + void createIndex(RestClient client, String indexName, String mapping) throws IOException; + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 8edcdd9edb124..408f58fb191b5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -145,16 +145,24 @@ public static Map loadMapping(String name) { } public static EnrichResolution emptyPolicyResolution() { - return new EnrichResolution(Set.of(), Set.of()); + return new EnrichResolution(); + } + + public static SearchStats statsForExistingField(String... names) { + return fieldMatchingExistOrMissing(true, names); } public static SearchStats statsForMissingField(String... names) { + return fieldMatchingExistOrMissing(false, names); + } + + private static SearchStats fieldMatchingExistOrMissing(boolean exists, String... names) { return new TestSearchStats() { - private final Set missingFields = Set.of(names); + private final Set fields = Set.of(names); @Override public boolean exists(String field) { - return missingFields.contains(field) == false; + return fields.contains(field) == exists; } }; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv index 8c20e876385a5..1594c6b8f54f0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv @@ -1,892 +1,892 @@ -abbrev:keyword,name:text, scalerank:integer,type:keyword, location:geo_point -LUH, Sahnewal, 9, small, POINT(75.9570722403652 30.8503598561702) -SSE, Solapur, 9, mid, POINT(75.9330597710755 17.625415183635) -IXR, Birsa Munda, 9, mid, POINT(85.3235970368767 23.3177245989962) -AWZ, Ahwaz, 9, mid, POINT(48.7471065435931 31.3431585560757) -GWL, Gwalior, 9, [mid,military], POINT(78.2172186546348 26.285487697937) -HOD, Hodeidah Int'l, 9, mid, POINT(42.97109630194 
14.7552534413725) -IDR, Devi Ahilyabai Holkar Int'l, 9, mid, POINT(75.8092915005895 22.727749187571) -ISK, Gandhinagar, 9, mid, POINT(73.8105674924689 19.9660205672806) -IXC, Chandigarh Int'l, 9, [major,military], POINT(76.8017261105242 30.6707248949667) -IXU, Aurangabad, 9, mid, POINT(75.3958432922005 19.8672969621082) -LYP, Faisalabad Int'l, 9, [mid,military], POINT(72.9878190922305 31.3627435480862) -OMS, Omsk Tsentralny, 9, mid, POINT(73.3163595376585 54.9576482934059) -OVB, Novosibirsk Tolmachev, 9, mid, POINT(82.6671524525865 55.0095847136264) -OZH, Zaporozhye Int'l, 9, [mid,military], POINT(35.3018728575279 47.8732635579023) -PKU, Simpang Tiga, 9, mid, POINT(101.446569298441 0.464600872998505) -ROP, Rota Int'l, 9, mid, POINT(145.243980298582 14.1717712971216) -SGC, Surgut, 9, mid, POINT(73.4084964764375 61.3401672194481) -TRZ, Tiruchirappalli, 9, mid, POINT(78.7089578747476 10.7603571306554) -TUK, Turbat Int'l, 9, mid, POINT(63.0279333519181 25.988794590011) -UET, Quetta Int'l, 9, mid, POINT(66.9487311480949 30.249043186181) -ZAH, Zahedan Int'l, 9, mid, POINT(60.900708564915 29.4752941956573) -MLG, Abdul Rachman Saleh, 9, [mid,military], POINT(112.711418617258 -7.92998002840567) -BAX, Barnaul, 9, mid, POINT(83.5504532124038 53.3633850813046) -VIAX, Adampur, 9, [military,mid], POINT(75.7584828456005 31.4329422397715) -VIBY, Bareilly, 9, military, POINT(79.452002687657 28.4218087161144) -OPQS, Dhamial, 9, small, POINT(73.0320498392002 33.5614146278861) -CJJ, Cheongju Int'l, 9, major, POINT(127.495916124681 36.7220227766673) -KWJ, Gwangju, 9, [mid,military], POINT(126.810839481226 35.1400051390198) -TAE, Daegu Int'l, 9, mid, POINT(128.637537699933 35.8999277969087) -USN, Ulsan, 9, mid, POINT(129.355731047528 35.5928957527107) -WIIT, Radin Inten II, 9, mid, POINT(105.176060419161 -5.242566777132) -IXD, Allahabad, 9, military, POINT(81.7317271462187 25.443522027821) -CEK, Chelyabinsk, 9, mid, POINT(61.5122589740201 55.2977919496055) -TNN, Tainan, 8, 
[military,mid], POINT(120.209733318093 22.950667918347) -RMQ, Taichung, 8, [military,mid], POINT(120.630703547584 24.2666555567115) -RTM, Rotterdam The Hague, 8, mid, POINT(4.43384434962876 51.9491301899382) -VOZ, Voronezh-Chertovitskoye, 8, mid, POINT(39.2254496447973 51.8126171268344) -LPL, Liverpool John Lennon, 8, major, POINT(-2.85862065784938 53.3363751054422) -VTZ, Vishakapatnam, 8, mid, POINT(83.2235216387465 17.7279577384364) -UPG, Sultan Hasanuddin Int'l, 8, major, POINT(119.545691342151 -5.05893689455779) -VAV, Vava'u Int'l, 8, mid, POINT(-173.968093944159 -18.5860058550654) -NCL, Newcastle Int'l, 8, major, POINT(-1.71034578407216 55.037084860802) -LCE, Goloson Int'l, 8, mid, POINT(-86.8514685020011 15.7451596659126) -MED, Madinah Int'l, 8, major, POINT(39.6991359560417 24.5442339605661) -YMX, Mirabel Int'l, 8, mid, POINT(-74.0287382984814 45.6832250979267) -PLQ, Palanga Int'l, 8, mid, POINT(21.0974463986251 55.9713426235358) -JAI, Jaipur Int'l, 8, mid, POINT(75.8010104192668 26.8211798100605) -IXW, Sonari, 8, mid, POINT(86.1724662363776 22.8154145110242) -YEI, Yenisehir, 8, mid, POINT(29.54492 40.2555395007473) -ADA, Şakirpaşa, 8, major, POINT(35.2969614268338 36.9852090641795) -ADQ, Kodiak, 8, mid, POINT(-152.485638515235 57.7485921070483) -AMA, Amarillo Int'l, 8, major, POINT(-101.705352772697 35.2184031919398) -ASP, Alice Springs, 8, mid, POINT(133.902918 -23.801968) -ATQ, Raja Sansi Int'l, 8, [mid,military], POINT(74.8071559719824 31.7068220258888) -BBI, Biju Patnaik, 8, mid, POINT(85.8168899544429 20.2526659754734) -BET, Bethel, 8, mid, POINT(-161.83898695944 60.7787379834088) -BGA, Palonegro, 8, mid, POINT(-73.1809207725361 7.12770915402685) -BHM, Birmingham Int'l, 8, major, POINT(-86.7523773615462 33.5618672828058) -BHQ, Broken Hill, 8, mid, POINT(141.470407303097 -31.998996737463) -BIL, Logan Int'l, 8, major, POINT(-108.536929388125 45.8036855715278) -BIS, Bismarck Muni., 8, mid, POINT(-100.757471303717 46.7751066661614) -BJX, Del Bajio Int'l, 
8, mid, POINT(-101.478753382467 20.9858871211938) -BNI, Benin, 8, mid, POINT(5.603682560067 6.31716689207818) -BOJ, Bourgas, 8, major, POINT(27.5164093662953 42.5670835487702) -BRE, Bremen, 8, major, POINT(8.7858617703132 53.052287104156) -BRM, Jacinto Lara Int'l, 8, mid, POINT(-69.356102 10.0453) -BRO, Brownsville-South Padre Island Int'l, 8, mid, POINT(-97.431765340232 25.9062743545347) -BRS, Bristol Int'l, 8, major, POINT(-2.71086469134308 51.3862934189148) -BTR, Baton Rouge Metro, 8, major, POINT(-91.1567544048105 30.5326138040586) -BTS, Bratislava-M.R. Štefánik, 8, major, POINT(17.1999850022208 48.1698379062535) -BTV, Burlington Int'l, 8, mid, POINT(-73.1550787790668 44.4692066040732) -CAE, Columbia Metro, 8, major, POINT(-81.1093352429377 33.9342054584275) -CCJ, Calicut Int'l, 8, major, POINT(75.950993063051 11.1395520526064) -CCK, Cocos (Keeling) Islands, 8, mid, POINT(96.8287472144207 -12.1851585953293) -CFU, Corfu Int'l (Ioannis Kapodistrias), 8, mid, POINT(19.9147561641662 39.6067554505259) -CGQ, Changchun Longjia Int'l, 8, major, POINT(125.690456812998 43.993011479577) -CHS, Charleston Int'l, 8, [major,military], POINT(-80.0369337438262 32.8845301562965) -CJB, Coimbatore, 8, mid, POINT(77.038893772262 11.0301415125983) -CLJ, Someseni, 8, mid, POINT(23.6869812680486 46.7826626340243) -CMW, Ignacio Agramonte, 8, mid, POINT(-77.8451039935167 21.4247037281961) -CPR, Casper/Natrona County Int'l, 8, major, POINT(-106.464444809692 42.8971900483006) -CRK, Clark Int'l, 8, major, POINT(120.550770223914 15.1876422423888) -CRW, Yeager, 8, [major,military], POINT(-81.5964164667526 38.3705914372865) -CTA, Catania Fontanarossa, 8, major, POINT(15.0674605007053 37.470072800341) -CTM, Chetumal Int'l, 8, mid, POINT(-88.3242600415365 18.506434233376) -CWL, Cardiff, 8, major, POINT(-3.33956876429118 51.3986220911017) -CYB, Gerrard Smith, 8, mid, POINT(-79.879461638003 19.6898653962844) -CZM, Cozumel Int'l, 8, mid, POINT(-86.9304064070436 20.5115543771647) -DAY, James M. 
Cox Dayton Int'l, 8, major, POINT(-84.2204594238102 39.8990402865362) -DBO, Dubbo, 8, mid, POINT(148.569717 -32.218681) -DCA, Washington Nat'l, 8, major, POINT(-77.0433373925631 38.8537162012123) -DGO, Durango Int'l, 8, mid, POINT(-104.533846024964 24.1261948326182) -DNK, Voloskoye, 8, mid, POINT(35.0939060224975 48.3675718021117) -DOK, Donetsk, 8, major, POINT(37.7448085572103 48.0691671285582) -DZO, Santa Bernardina Int'l, 8, mid, POINT(-56.4992636213744 -33.3591084475501) -EDI, Edinburgh Int'l, 8, major, POINT(-3.36428468513554 55.9485540113639) -EIS, Terrance B. Lettsome Int'l, 8, mid, POINT(-64.5371514365794 18.4443618557983) -EKO, Elko Reg., 8, mid, POINT(-115.786479232249 40.8276058815225) -ESE, Ensenada, 8, mid, POINT(-116.595724400418 31.7977139760569) -FAE, Vágar, 8, mid, POINT(-7.2708 62.0625) -FAR, Hector Int'l, 8, [mid,military], POINT(-96.8254561269675 46.9198178811323) -FAT, Fresno Yosemite Int'l, 8, mid, POINT(-119.720001323576 36.7698128373959) -FLG, Flagstaff Pulliam, 8, mid, POINT(-111.674656171675 35.1389116757976) -FRS, Mundo Maya Int'l, 8, mid, POINT(-89.8778404226508 16.9149741642226) -FSD, Sioux Falls Reg., 8, mid, POINT(-96.7313831017541 43.5801934972763) -GEG, Spokane Int'l, 8, major, POINT(-117.536836628585 47.6254781278368) -GGT, Exuma Int'l, 8, mid, POINT(-75.872613085304 23.5638829069259) -GIB, Gibraltar, 8, mid, POINT(-5.34677180033388 36.1512747504173) -GRR, Gerald R. 
Ford Int'l, 8, mid, POINT(-85.529573207274 42.8847776020908) -GSO, Triad Int'l, 8, major, POINT(-79.9364867577484 36.1053781998932) -GTF, Great Falls Int'l, 8, mid, POINT(-111.35668472784 47.482270729263) -GZT, Gaziantep Oğuzeli Int'l, 8, major, POINT(37.47380325219 36.9453633446875) -HBX, Hubli, 8, mid, POINT(75.0863155680281 15.3591833386229) -HDY, Hat Yai Int'l, 8, mid, POINT(100.393751274671 6.93634231940664) -HFE, Hefei-Luogang, 8, mid, POINT(117.304197015888 31.7798576795778) -HRG, Hurghada Int'l, 8, major, POINT(33.8071606414118 27.1804260918186) -HRK, Kharkov Int'l, 8, major, POINT(36.2822010773503 49.9215360631551) -HSV, Huntsville Int'l, 8, major, POINT(-86.7749430563373 34.6483344609319) -IBA, Ibadan, 8, mid, POINT(3.9738133433229 7.36034397269393) -ICT, Kansas City Int'l, 8, major, POINT(-97.4287387683976 37.6529279603903) -ILM, Wilmington Int'l, 8, mid, POINT(-77.9103756560469 34.2667840671996) -ILR, Ilorin Int'l, 8, mid, POINT(4.49484038819934 8.43537651935241) -INL, Falls Int'l, 8, mid, POINT(-93.3980027552794 48.5659930848414) -INV, Inverness, 8, mid, POINT(-4.06359175587141 57.5395002923424) -IPL, Imperial Cty., 8, mid, POINT(-115.57199556658 32.8339586685524) -IXJ, Jammu, 8, mid, POINT(74.8423077638915 32.6810428886225) -IXM, Madurai, 8, mid, POINT(78.0911394937194 9.83718627877566) -JDH, Jodhpur, 8, [major,military], POINT(73.0505491895671 26.2637623458351) -JLR, Jabalpur, 8, mid, POINT(80.0587438885277 23.1845328746465) -JRO, Kilimanjaro Int'l, 8, mid, POINT(37.0651896067748 -3.42444495998178) -KAD, Kaduna, 8, mid, POINT(7.32525347407434 10.6946192862391) -KGA, Kananga, 8, mid, POINT(22.4783332482689 -5.90016656227041) -KMS, Kumasi, 8, mid, POINT(-1.59257526582361 6.71460638750365) -KNA, Viña del Mar, 8, mid, POINT(-71.4806025354969 -32.948391765136) -KNU, Kanpur, 8, mid, POINT(80.3675338772002 26.4388334467042) -KOA, Kona Int'l at Keahole, 8, mid, POINT(-156.040889471058 19.7370991399442) -KOI, Kirkwall, 8, mid, POINT(-2.90137849524508 
58.9544288788303) -KTU, Kota, 8, mid, POINT(75.8504977944552 25.1634187166743) -KYA, Konya, 8, [major,military], POINT(32.5756732669687 37.9839945531695) -LEX, Blue Grass, 8, major, POINT(-84.5982681918786 38.0374273181372) -LIH, Lihue, 8, mid, POINT(-159.349084290522 21.9781243162088) -LIT, Clinton National, 8, major, POINT(-92.2205881319289 34.7284300415179) -LMM, Los Mochis, 8, mid, POINT(-109.082694645261 25.688508826099) -LOV, Venustiano Carranza Int'l, 8, mid, POINT(-101.464960031751 26.9553927160699) -LRD, Laredo Int'l, 8, mid, POINT(-99.4556603976513 27.5436657175825) -LSI, Sumburgh, 8, mid, POINT(-1.28806068838753 59.8766899598999) -LTK, Bassel Al-Assad Int'l, 8, major, POINT(35.9442407096663 35.4073114596744) -LTN, London Luton, 8, major, POINT(-0.376227267397439 51.8802952570969) -LYR, Svalbard Longyear, 8, mid, POINT(15.495229 78.246717) -MBJ, Sangster Int'l, 8, mid, POINT(-77.9183907635752 18.5011549298249) -MDL, Mandalay Int'l, 8, mid, POINT(95.9706535950217 21.7055490680274) -MDW, Chicago Midway Int'l, 8, major, POINT(-87.7421266885612 41.7883492597409) -MEC, Eloy Alfaro Int'l, 8, [mid,military], POINT(-80.6833845995774 -0.949557002112883) -MGM, Montgomery Reg., 8, major, POINT(-86.3903074602686 32.3045879909631) -MHT, Manchester-Boston Reg., 8, major, POINT(-71.4375239091857 42.9279139945886) -DNMA, Maiduguri Int'l, 8, mid, POINT(13.0851390162471 11.8534713188527) -MJM, Mbuji Mayi, 8, mid, POINT(23.5721091989052 -6.12484541348812) -MOT, Minot Int'l, 8, mid, POINT(-101.2913855313 48.2556049212839) -MSO, Missoula Int'l, 8, mid, POINT(-114.083694923651 46.9187604768831) -MXL, Gen R.S. 
Taboada Int'l, 8, mid, POINT(-115.247874047841 32.6285643324607) -MXP, Malpensa, 8, major, POINT(8.71295953502437 45.6274405140381) -NLK, Norfolk Island, 8, mid, POINT(167.943394116205 -29.0351592555275) -NUE, Nurnberg, 8, major, POINT(11.0774179739096 49.4945052170345) -ODS, Odessa Int'l, 8, major, POINT(30.6768308310206 46.4406268759106) -OOL, Gold Coast, 8, mid, POINT(153.512876264303 -28.1665168540202) -ORN, Oran Es Senia, 8, mid, POINT(-0.60679696443112 35.6202747312734) -PAT, Lok Nayak Jaiprakash, 8, mid, POINT(85.0909021314663 25.5944434295605) -PDU, Paysandu, 8, mid, POINT(-58.0685346825257 -32.3614545292723) -PFO, Paphos Int'l, 8, major, POINT(32.4832322064926 34.7134012817335) -PLM, Sultan Mahmud Badaruddin II, 8, mid, POINT(104.699128326762 -2.89999345005997) -PTG, Polokwane Int'l, 8, mid, POINT(29.4533403645644 -23.858986270166) -PUJ, Punta Cana, 8, mid, POINT(-68.3632351074649 18.563039033987) -QRO, Queretaro Int'l, 8, mid, POINT(-100.18735943003 20.622466071278) -RAJ, Rajkot, 8, mid, POINT(70.7799548311565 22.3092816988361) -RIC, Richmond Int'l, 8, major, POINT(-77.333119638113 37.5082899750901) -RJH, Shah Makhdum, 8, mid, POINT(88.6138045704431 24.4448068623035) -ROC, Greater Rochester Int'l, 8, major, POINT(-77.6652445062197 43.1275519826482) -ROK, Rockhampton, 8, mid, POINT(150.478897 -23.378599) -ROV, Rostov-on-Don, 8, mid, POINT(39.8035144445391 47.2551119519754) -RTW, Saratov, 8, mid, POINT(46.035023249891 51.5606456508842) -SAP, Ramón Villeda Morales Int'l, 8, mid, POINT(-87.9272365125409 15.4558630524883) -SBA, Santa Barbara Muni., 8, mid, POINT(-119.8366015808 34.4257312978783) -SCC, Deadhorse, 8, mid, POINT(-148.457855 70.19751) -SFJ, Kangerlussuaq, 8, mid, POINT(-50.694199 67.018097) -SGF, Springfield Reg., 8, major, POINT(-93.3826379012003 37.2421444903024) -SHV, Shreveport Reg., 8, major, POINT(-93.8285222229503 32.4545798866513) -SIP, Simferopol Int'l, 8, major, POINT(33.9960529244537 45.0202173978165) -SIT, Sitka Rocky Gutierrez, 8, 
mid, POINT(-135.365692 57.05349) -SJD, Los Cabos Int'l, 8, major, POINT(-109.717858386909 23.1626574483597) -SLE, McNary Field, 8, major, POINT(-123.007871479404 44.9105138452142) -SLW, Plan de Guadalupe, 8, mid, POINT(-100.932260548587 25.5479976419974) -SNN, Shannon, 8, major, POINT(-8.92242885557686 52.6934537102532) -SON, Santo Pekoa Int'l, 8, mid, POINT(167.220894919375 -15.5055387370858) -SRG, Achmad Yani, 8, mid, POINT(110.378556255666 -6.97873484956982) -SXR, Srinagar, 8, [military,mid], POINT(74.7826243672311 33.9830909431623) -TAP, Tapachula Int'l, 8, mid, POINT(-92.370003 14.7911281338773) -TGD, Podgorica, 8, major, POINT(19.2466868618873 42.3679335195428) -TLH, Tallahassee Reg., 8, major, POINT(-84.3449953984858 30.3955576176938) -TRN, Turin Int'l, 8, major, POINT(7.64416230362133 45.1916600734642) -TYN, Taiyuan Wusu Int'l, 8, major, POINT(112.625891539315 37.7545117791512) -UAK, Narsarsuaq, 8, mid, POINT(-45.4164008923108 61.1625968337328) -UTP, U-Tapao, 8, [military,mid], POINT(101.00020929048 12.6852930912664) -VFA, Victoria Falls, 8, mid, POINT(25.8467677208826 -18.0990155983682) -VGA, Vijaywada, 8, mid, POINT(80.7973080000675 16.528642778235) -VNS, Varanasi, 8, mid, POINT(82.8538741913527 25.4499077329822) -VRA, Juan Gualberto Gomez, 8, major, POINT(-81.4367103850623 23.0395422339631) -VSA, Villahermosa, 8, mid, POINT(-92.8190675836262 17.9930660113111) -YBR, Brandon, 8, mid, POINT(-99.9458959002463 49.9047279410277) -YED, CFB Edmonton, 8, [military,major], POINT(-113.478839054497 53.6749156618668) -YFB, Iqaluit, 8, mid, POINT(-68.5367292441812 63.7511523537807) -YHM, John C. 
Munro Hamilton Int'l, 8, mid, POINT(-79.9264230959967 43.1633605305096) -YMM, Fort McMurray, 8, mid, POINT(-111.223840046617 56.6563171390962) -YNT, Yantai, 8, [major,military], POINT(121.372047417773 37.4077044726924) -YPE, Peace River, 8, mid, POINT(-117.443663208082 56.231924036745) -YQM, Greater Moncton Int'l, 8, mid, POINT(-64.6886696807361 46.1162059639259) -YQY, Sydney/J.A. Douglas McCurdy, 8, mid, POINT(-60.0469372117026 46.1673405890504) -YRB, Resolute Bay, 8, mid, POINT(-94.9708023244006 74.7181860987594) -YSM, Fort Smith, 8, mid, POINT(-111.961059938158 60.0198749602443) -YTH, Thompson, 8, mid, POINT(-97.860733 55.797482) -YTS, Timmins, 8, mid, POINT(-81.372047 48.566158) -YUT, Repulse Bay, 8, mid, POINT(-86.25 66.533302) -YVP, Kuujjuaq, 8, mid, POINT(-68.433342 58.101959) -YWK, Wabush, 8, mid, POINT(-66.873009 52.926071) -YXD, Edmonton City Centre, 8, mid, POINT(-113.522973688581 53.5709436582812) -YXJ, Fort St. John (N. Peace), 8, mid, POINT(-120.736439 56.246035) -YYB, North Bay/Jack Garland, 8, mid, POINT(-79.42491 46.358711) -ZAR, Zaria, 8, mid, POINT(7.68726764310577 11.1352958601071) -SKP, Skopje, 8, mid, POINT(21.6281971858229 41.9564546081544) -VE23, Burnpur, 8, mid, POINT(86.974546776573 23.6312179107764) -VIDX, Hindon Air Force Station, 8, mid, POINT(77.3507888779117 28.7077968601071) -, Sunchon, 8, major, POINT(125.890825057486 39.4119659710565) -EPLL, Łódź Władysław Reymont, 8, mid, POINT(19.4032148744037 51.72720704517) -BXJ, Alma Ata N.W., 8, [mid,military], POINT(76.8782640096648 43.3554190837919) -JMU, Jiamusi Dongjiao, 8, mid, POINT(130.456204704407 46.8430150223379) -MDG, Mudanjiang Hailang, 8, major, POINT(129.58015153222 44.5342936299935) -ULMM, Severomorsk-3 (Murmansk N.E.), 8, [military,major], POINT(33.2903527616285 69.0168711826804) -OSB, Mosul Int'l, 8, mid, POINT(43.145802 36.308601) -, Rostov N., 8, [military,mid], POINT(39.6353996343665 47.2774209202867) -, Rostov S.W., 8, mid, POINT(39.7972215345149 47.1158577255835) -OUL, 
Oulu, 8, mid, POINT(25.3728374704307 64.9287992358849) -BOD, Bordeaux, 8, major, POINT(-0.701793449075243 44.8321108662674) -CEQ, Mandelieu, 8, mid, POINT(6.95431612028937 43.546097987045) -DOL, St Gatien, 8, mid, POINT(0.158653528230218 49.3616609986609) -LIL, Lille-Lesquin, 8, mid, POINT(3.10596499799813 50.5716423929581) -TLS, Toulouse-Blagnac, 8, major, POINT(1.37350918551153 43.6304625661601) -FUK, Fukuoka, 8, major, POINT(130.444189541884 33.5848164332573) -HIW, Hiroshima-Nishi, 8, mid, POINT(132.419372741681 34.3713815628829) -NKM, Nagoya, 8, mid, POINT(136.91962838414 35.2540532052867) -SDJ, Sendai, 8, mid, POINT(140.930247381369 38.1382075615287) -KKN, Kirkenes Hoybuktmoen, 8, mid, POINT(29.8913489500406 69.7238318113692) -CGB, Marechal Rondon Int'l, 8, mid, POINT(-56.1201774754724 -15.6511470191955) -FLN, Hercilio Luz Int'l, 8, major, POINT(-48.5448122049599 -27.6646276941638) -JOI, Joinville-Lauro C. de Loyola, 8, mid, POINT(-48.8016498165616 -26.2242941374785) -JPA, Presidente Castro Pinto Int'l, 8, mid, POINT(-34.9488925911125 -7.14617462402047) -NAT, Augusto Severo Int'l, 8, major, POINT(-35.2488410165389 -5.89912054477116) -OPO, Francisco Sa Carneiro, 8, major, POINT(-8.67127240719647 41.2368708920452) -SLZ, Marechal Cunha Machado Int'l, 8, mid, POINT(-44.2362344700492 -2.58350921043019) -SSZ, Santos Air Force Base, 8, [military,mid], POINT(-46.3052704931003 -23.9237590410637) -THE, Teresina-Senador Petronio Portella, 8, mid, POINT(-42.8212402317845 -5.06346299167191) -VCP, Viracopos-Campinas Int'l, 8, mid, POINT(-47.1410791911014 -23.0096239085339) -VIX, Eurico de Aguiar Salles, 8, mid, POINT(-40.2885368759913 -20.2574162759418) -ALC, Alicante, 8, major, POINT(-0.557230440363588 38.2866408993929) -LEI, Almeria, 8, mid, POINT(-2.3716014405912 36.8477672709643) -VLC, Valencia, 8, mid, POINT(-0.473474930771676 39.4914597884489) -KRN, Kiruna_Airport, 8, mid, POINT(20.3351522954898 67.8256066056432) -NRK, Norrköping Airport, 8, major, 
POINT(16.2339407695814 58.5833805017541) -BDO, Husein Sastranegara Int'l, 8, mid, POINT(107.575611852209 -6.90042408353409) -ROS, Rosario – Islas Malvinas Int'l, 8, mid, POINT(-60.7800787216586 -32.9162269743812) -MCZ, Maceio/Zumbi dos Palmares Int'l, 8, mid, POINT(-35.7924951215833 -9.51494118540116) -SSH, Sharm el-Sheikh Int'l, 8, mid, POINT(34.3901189267288 27.9804044199168) -TCP, Taba Int'l, 8, mid, POINT(34.7758378996779 29.5944990568019) -AGR, Agra, 8, [major,military], POINT(77.960909176509 27.15772773475) -BDQ, Vadodara, 8, mid, POINT(73.2262889533239 22.3361640021171) -KSH, Shahid Ashrafi Esfahani, 8, mid, POINT(47.1565835165639 34.3464167739108) -BEN, Benina Int'l, 8, mid, POINT(20.2680398018516 32.0872774606553) -DHA, King Abdulaziz AB, 8, [military,major], POINT(50.1477245727844 26.2703680854768) -STY, Nueva Hespérides Int'l, 8, mid, POINT(-57.9840821176492 -31.4373883387798) -BAIK, Baikonur Cosmodrome, 8, spaceport, POINT(63.307354423875 45.9635739403124) -KSC, Kennedy Space Center, 8, spaceport, POINT(-80.6369680911892 28.5163704772027) -CSG, Centre Spatial Guyanais, 8, spaceport, POINT(-52.7684296893452 5.23941001258035) -AUA, Queen Beatrix Int'l, 7, mid, POINT(-70.0076228563496 12.5034643630297) -JIB, Djibouti-Ambouli Int'l, 7, mid, POINT(43.1497127859956 11.5521018230172) -IQQ, Diego Aracena Int'l, 7, [mid,military], POINT(-70.178635395533 -20.5478400878309) -SAW, Sabiha Gökçen Havaalani, 7, major, POINT(29.3095991423889 40.9043003553957) -KSA, Kosrae Island, 7, mid, POINT(162.957041225076 5.3520098571828) -FUN, Funafuti Int'l, 7, mid, POINT(179.19544202302 -8.52485415059424) -NAG, Dr. Babasaheb Ambedkar Int'l, 7, mid, POINT(79.0537976421986 21.0899317630087) -HKT, Phuket Int'l, 7, mid, POINT(98.3060384900559 8.10768475952735) -NAN, Nadi Int'l, 7, mid, POINT(177.451151198059 -17.7529129479792) -AGU, Lic. 
Jesús Terán Peredo Int'l, 7, mid, POINT(-102.314093740058 21.7013390329207) -ALL, Albenga, 7, mid, POINT(8.12314535436409 44.0458773598158) -AMM, Queen Alia Int'l, 7, major, POINT(35.989707162193 31.7226621600432) -ARI, Chacalluta Int'l, 7, mid, POINT(-70.3357301410959 -18.3492061639579) -ATR, Atar Int'l, 7, mid, POINT(-13.0511704323315 20.4982706101565) -BAQ, Ernesto Cortissoz Int'l, 7, mid, POINT(-74.776555978265 10.8866775959414) -BRC, Teniente Luis Candelaria Int'l, 7, mid, POINT(-71.1614300869763 -41.1459976958105) -BYK, Bouaké, 7, mid, POINT(-5.06894222275311 7.73610495555032) -BZE, Philip S. W. Goldson Int'l, 7, major, POINT(-88.3082064033075 17.5360686575521) -CRP, Corpus Christi Int'l, 7, major, POINT(-97.5022678710298 27.7744560700823) -CUR, Hato Int'l, 7, mid, POINT(-68.9568788072761 12.1848346052019) -CUZ, Velazco Astete Int'l, 7, major, POINT(-71.9436641449722 -13.5382186992639) -DAR, Julius Nyerere Int'l, 7, mid, POINT(39.2074715039165 -6.86672004249119) -DET, Detroit City, 7, mid, POINT(-83.0039681417733 42.4090938431907) -DIL, Presidente Nicolau Lobato Int'l, 7, mid, POINT(125.524854209182 -8.54931157414564) -DME, Moscow Domodedovo Int'l, 7, major, POINT(37.9002531289452 55.4141528223023) -DUD, Dunedin Int'l, 7, mid, POINT(170.200027 -45.923431) -DZA, Dzaoudzi Pamanzi Int'l, 7, mid, POINT(45.2817864197899 -12.8049474381643) -ELP, El Paso Int'l, 7, mid, POINT(-106.395714679366 31.7990860272589) -EVN, Zvartnots Int'l, 7, major, POINT(44.4000630536938 40.1523679451884) -FTW, Fort Worth Meacham Field, 7, major, POINT(-97.3551348561587 32.8207529047972) -GDT, JAGS McCartney Int'l, 7, mid, POINT(-71.1461337448876 21.4421237439063) -GLS, Scholes Int'l, 7, mid, POINT(-94.8554013876264 29.2671239212096) -GOM, Goma Int'l, 7, mid, POINT(29.2400534952228 -1.6583179500207) -GOU, Garoua Int'l, 7, mid, POINT(13.3724309377878 9.33068867678854) -GUM, Antonio B. 
Won Pat Int'l, 7, major, POINT(144.805850357093 13.4926462359465) -GYY, Gary/Chicago Int'l, 7, mid, POINT(-87.4083596247406 41.6177930015166) -HAH, Prince Said Ibrahim Int'l, 7, mid, POINT(43.2745612179616 -11.5366393829127) -HBA, Hobart Int'l, 7, mid, POINT(147.505996190408 -42.8376083694822) -HIR, Honiara Int'l, 7, mid, POINT(160.045855129925 -9.42757566400146) -IEV, Kiev Zhuliany Int'l, 7, mid, POINT(30.4451305182104 50.412808165985) -IKT, Irkutsk S.E., 7, [mid,military], POINT(104.355859748002 52.2728893882244) -IND, Indianapolis Int'l, 7, major, POINT(-86.2734003650885 39.7302043703969) -INU, Nauru Int'l, 7, mid, POINT(166.91613965882 -0.545037226856384) -IPC, Mataveri Int'l, 7, mid, POINT(-109.43006441001 -27.1587738388538) -JUJ, Gob. Horacio Guzman Int'l, 7, mid, POINT(-65.0937665458812 -24.3861010775846) -KHN, Nanchang Changbei Int'l, 7, mid, POINT(115.911979918602 28.8624891200666) -KMG, Kunming Wujiaba Int'l, 7, major, POINT(102.742117578823 24.999996110081) -LBA, Leeds Bradford, 7, major, POINT(-1.65983106734746 53.8690819474434) -LBV, Libreville Leon M'ba Int'l, 7, mid, POINT(9.41022337820712 0.457139229503759) -LFW, Lomé Tokoin, 7, mid, POINT(1.25093205640014 6.16687362722297) -LWO, Lviv Danylo Halytskyi Int'l, 7, [mid,military], POINT(23.9461269598944 49.8178506050005) -MAJ, Marshall Islands Int'l, 7, mid, POINT(171.281919370648 7.06811848557091) -MFM, Macau Int'l, 7, major, POINT(113.57451294862 22.1576572529634) -MGQ, Aden Adde Int'l, 7, mid, POINT(45.3036374186202 2.01635311214988) -MPM, Maputo Int'l, 7, mid, POINT(32.5741915194782 -25.924276711787) -MRU, Sir Seewoosagur Ramgoolam Int'l, 7, mid, POINT(57.6769860076636 -20.4317567793216) -NAP, Naples Int'l, 7, major, POINT(14.2828444340203 40.8780728843639) -NDB, Nouadhibou Int'l, 7, mid, POINT(-17.0334398691538 20.9290523064387) -NGB, Ningbo Lishe Int'l, 7, major, POINT(121.461819388484 29.8208231906861) -NKC, Nouakchott Int'l, 7, mid, POINT(-15.9519259252201 18.0979231718174) -NOU, La Tontouta 
Int'l, 7, mid, POINT(166.217232118699 -22.0136386248981) -OAK, Oakland Int'l, 7, major, POINT(-122.213261257863 37.7123036951691) -ONT, Ontario Int'l, 7, major, POINT(-117.592327651651 34.060191102066) -ORK, Cork, 7, major, POINT(-8.49014199983817 51.8485405419923) -PDG, Minangkabau Int'l, 7, mid, POINT(100.285455851791 -0.786045714026273) -PDL, João Paulo II, 7, mid, POINT(-25.6969882198711 37.7433316472933) -PEW, Bacha Khan Int'l, 7, mid, POINT(71.5188149912667 33.9914027889596) -PIK, Glasgow Prestwick, 7, mid, POINT(-4.61097163901068 55.5088918105142) -PMG, Ponta Porã Int'l, 7, mid, POINT(-55.7060793748573 -22.551786560876) -PMR, Palmerston N. Int'l, 7, mid, POINT(175.62128328196 -40.3233178852055) -PNI, Pohnpei Int'l, 7, mid, POINT(158.203304490964 6.98130676512123) -PPT, Tahiti Faa'a Int'l, 7, mid, POINT(-149.609757932429 -17.5594577659942) -PSA, Pisa Galileo Galilei Int'l, 7, [major,military], POINT(10.4001343718056 43.6983224157664) -PZU, Port Sudan, 7, [mid,military], POINT(37.216065757542 19.5760636531968) -RAI, Praia Int'l, 7, mid, POINT(-23.4862019883587 14.9449889352832) -RAK, Marrakech-Menara, 7, mid, POINT(-8.02460535907989 31.6022946597764) -RAR, Rarotonga Int'l, 7, mid, POINT(-159.798156308387 -21.2009821724632) -REP, Siem Reap Int'l, 7, major, POINT(103.815780528112 13.4087969693538) -RGA, Hermes Quijada Int'l, 7, mid, POINT(-67.7530268462675 -53.7814746058316) -RGL, Piloto Civil Norberto Fernandez Int'l, 7, mid, POINT(-69.3064711776731 -51.6116980855402) -RNO, Reno-Tahoe Int'l, 7, major, POINT(-119.775283308105 39.5058499014703) -ROR, Roman Tmetuchl Int'l, 7, mid, POINT(134.532953466159 7.3644955361292) -SID, Amilcar Cabral Int'l, 7, mid, POINT(-22.9440574079648 16.7347932693385) -SJJ, Sarajevo, 7, major, POINT(18.3366185457127 43.8258872246797) -SKB, Robert L. 
Bradshaw Int'l, 7, mid, POINT(-62.7142125047316 17.311125840442) -SLA, Martín Miguel de Güemes Int, 7, mid, POINT(-65.4784760437796 -24.8443742713315) -SPN, Saipan Int'l, 7, mid, POINT(145.723694658638 15.1215167197664) -SRE, Juana Azurduy de Padilla Int'l, 7, mid, POINT(-65.2928631387847 -19.0139157924657) -SXM, Princess Juliana Int'l, 7, major, POINT(-63.1122760858602 18.042244021474) -TAI, Ta'izz Int'l, 7, mid, POINT(44.134782731062 13.6854970025574) -TAO, Qingdao Liuting Int'l, 7, mid, POINT(120.380685949061 36.2677578081039) -TKK, Chuuk Int'l, 7, mid, POINT(151.842046037403 7.45761780288443) -TNG, Tangier Ibn Battouta, 7, mid, POINT(-5.91288087655914 35.7257656409274) -TRW, Bonriki Int'l, 7, mid, POINT(173.145990795301 1.3806686975383) -TSE, Astana Int'l, 7, major, POINT(71.4609441399936 51.0269352907712) -TSN, Tianjin Binhai Int'l, 7, major, POINT(117.352723159919 39.1294609909008) -TSV, Townsville, 7, [major,military], POINT(146.77067890477 -19.2561814376212) -TUC, Teniente Gen. Benjamin Matienzo Int'l, 7, mid, POINT(-65.1081246236248 -26.8357310050714) -TUN, Aeroport Tunis, 7, major, POINT(10.2176992447111 36.8474482177219) -TUS, Tucson Int'l, 7, major, POINT(-110.937713232132 32.1203523441898) -ULN, Chinggis Khaan Int'l, 7, mid, POINT(106.762873994929 47.8525260966684) -URC, Ürümqi Diwopu Int'l, 7, major, POINT(87.4671298487808 43.8983382193653) -VLI, Bauerfield Int'l, 7, mid, POINT(168.319622739662 -17.7016990681781) -WWK, Wewak Int'l, 7, mid, POINT(143.669102299698 -3.58022689444744) -XCR, Châlons Vatry, 7, [military,mid], POINT(4.19111982574289 48.7803946138566) -XMN, Xiamen Gaoqi Int'l, 7, major, POINT(118.12696884672 24.537192570557) -YAP, Yap Int'l, 7, mid, POINT(138.086430283619 9.49791733361348) -ZLO, Playa de Oro Int'l, 7, mid, POINT(-104.560095200097 19.1480860285854) -CAY, Cayenne – Rochambeau, 7, mid, POINT(-52.3638068572357 4.82126714308924) -UIII, Irkutsk N.W., 7, mid, POINT(104.197359284494 52.3616476700131) -SJW, Shijiazhuang Zhengding 
Int'l, 7, major, POINT(114.692266598902 38.278140913112) -GYD, Heydar Aliyev Int'l, 7, major, POINT(50.0498394867405 40.462746883908) -LAK, Lakatamia Airbase, 7, [military,mid], POINT(33.322201334899 35.1063448067362) -CFB, Cabo Frio Int'l, 7, mid, POINT(-42.0792517520184 -22.9256317091328) -HEM, Helsinki-Malmi, 7, mid, POINT(25.0455353698315 60.2493778499587) -LUX, Luxembourg-Findel, 7, major, POINT(6.21642121728731 49.6343040925102) -VCE, Venice Marco Polo, 7, major, POINT(12.3410673004369 45.5048477588455) -YNY, Yangyang Int'l, 7, mid, POINT(128.66298866884 38.0587824162585) -TBT, Tabatinga Int'l, 7, mid, POINT(-69.939473933909 -4.25032469493379) -BVB, Boa Vista Int'l, 7, mid, POINT(-60.6922206338682 2.84119534121157) -LPA, Gran Canaria, 7, major, POINT(-15.3899245158461 27.9368899716574) -ING, Com. Armando Tola Int'l, 7, mid, POINT(-72.0538569101296 -50.2839008690038) -NYO, Stockholm-Skavsta, 7, mid, POINT(16.9216055584254 58.7851041303448) -MES, Polonia Int'l, 7, mid, POINT(98.6761925714641 3.56659179990894) -BGF, Bangui M'Poko Int'l, 7, mid, POINT(18.524123630208 4.39885153695957) -HGH, Hangzhou Xiaoshan Int'l, 7, major, POINT(120.432097376313 30.2351862790414) -CXI, Cassidy Int'l, 7, mid, POINT(-157.34977789343 1.98616119792402) -SQQ, Šiauliai Int'l, 7, mid, POINT(23.3831885738691 55.90376945404) -IUE, Niue Int'l, 7, mid, POINT(-169.926129774217 -19.0767129354511) -AGT, Guaraní Int'l, 7, mid, POINT(-54.8393995296062 -25.4568570715812) -AQP, Rodríguez Ballón Int'l, 7, mid, POINT(-71.5679335385285 -16.344552065352) -VVO, Vladivostok Int'l, 7, [mid,military], POINT(132.139841720715 43.3776492533885) -PRN, Pristina, 7, major, POINT(21.0302690124746 42.5850331153448) -ANR, Deurne, 6, mid, POINT(4.45092277399909 51.1891285063806) -LAP, Gen. 
Márquez de León Int'l, 6, mid, POINT(-110.367197859809 24.0760903521803) -HRB, Harbin Taiping, 6, major, POINT(126.236983030863 45.6206011723245) -TRV, Trivandrum Int'l, 6, mid, POINT(76.9189025612913 8.47650993894514) -ADB, Adnan Menderes, 6, major, POINT(27.1492975952664 38.2912347645175) -NKG, Nanjing Lukou Int'l, 6, major, POINT(118.866102146906 31.7353249296177) -FPO, Freeport Int'l, 6, mid, POINT(-78.7039343114497 26.548246747189) -TIP, Tripoli Int'l, 6, major, POINT(13.1442589810713 32.6691695504993) -YQX, Gander Int'l, 6, mid, POINT(-54.5755719093578 48.9465980060736) -DOH, Doha Int'l, 6, [major,military], POINT(51.5585487876547 25.2682461310506) -ABQ, Albuquerque Int'l, 6, major, POINT(-106.6166851616 35.0491578018276) -ANU, V.C. Bird Int'l, 6, mid, POINT(-61.7923676698358 17.1403599371617) -APW, Faleolo, 6, mid, POINT(-171.99732221834 -13.8325013323956) -ATZ, Asyut, 6, mid, POINT(31.0162490438011 27.0508158406978) -BAH, Bahrain Int'l, 6, major, POINT(50.6260028757534 26.2696971499497) -BDL, Bradley Int'l, 6, major, POINT(-72.685394743339 41.9303160058352) -BGI, Grantley Adams Int'l, 6, mid, POINT(-59.4874188953158 13.079661104553) -BJL, Yundum Int'l, 6, mid, POINT(-16.6523132698075 13.3438604788942) -BJM, Bujumbura Int'l, 6, mid, POINT(29.3209840169939 -3.32204434913113) -BLZ, Chileka Int'l, 6, mid, POINT(34.9719441837933 -15.6813844793272) -BME, Broome Int'l, 6, mid, POINT(122.233850515022 -17.952576129268) -BND, Bandar Abbass Int'l, 6, mid, POINT(56.368886456411 27.2103258455145) -BSR, Basrah Int'l, 6, major, POINT(47.6683766633518 30.552799016106) -CJS, Ciudad Juarez Int'l, 6, mid, POINT(-106.435846631055 31.6357566201951) -CMB, Katunayake Int'l, 6, major, POINT(79.8852573421506 7.17807710544221) -CNS, Cairns Int'l, 6, mid, POINT(145.7535848444 -16.8767421554062) -CNX, Chiang Mai Int'l, 6, major, POINT(98.9681181241593 18.7688473919675) -COS, City of Colorado Springs, 6, major, POINT(-104.700880274111 38.7974248779125) -CPE, Ign. 
Alberto Ongay Int'l, 6, mid, POINT(-90.5036283734038 19.8142247992074) -CSX, Changsha Huanghua Int'l, 6, major, POINT(113.214054203252 28.1899218619451) -CVG, Greater Cincinnati Int'l, 6, major, POINT(-84.6561699153392 39.055418904783) -DAD, Da Nang, 6, major, POINT(108.202706257936 16.053144145167) -DAL, Dallas Love Field, 6, major, POINT(-96.84986377098 32.8444253732738) -DAM, Damascus Int'l, 6, major, POINT(36.5128954718126 33.4114366702732) -DAV, Enrique Malek Int'l, 6, mid, POINT(-82.4317583369387 8.39126106116917) -DIR, Aba Tenna D. Yilma Int'l, 6, mid, POINT(41.857756722253 9.61267784753569) -DPS, Bali Int'l, 6, major, POINT(115.162322961107 -8.74475731595652) -DSM, Des Moines Int'l, 6, major, POINT(-93.6484612563736 41.5327904242113) -EBB, Entebbe Int'l, 6, mid, POINT(32.4427573135214 0.044940949388672) -FKI, Kisangani Bangoka Int'l, 6, mid, POINT(25.3302714896212 0.492225136917501) -FOC, Fuzhou Changle Int'l, 6, mid, POINT(119.668043820999 25.9318233148143) -GAU, Lokpriya G. Bordoloi Int'l, 6, mid, POINT(91.588229058187 26.1052475924255) -GDN, Gdansk Lech Walesa, 6, major, POINT(18.4684422165911 54.3807025352925) -GND, Point Salines Int'l, 6, mid, POINT(-61.7858529909285 12.0072683054283) -GOJ, Nizhny Novgorod Int'l, 6, mid, POINT(43.7896337062935 56.2185525910656) -GYM, Gen. José M. Yáñez Int'l, 6, mid, POINT(-110.921651270402 27.9694553962829) -HET, Hohhot Baita Int'l, 6, mid, POINT(111.814681821626 40.8540600906552) -HLN, Helena Reg., 6, mid, POINT(-111.989896896008 46.6102043529) -HMO, Gen. Ignacio P. 
Garcia Int'l, 6, mid, POINT(-111.051901711819 29.0900772523445) -IAD, Dulles Int'l, 6, major, POINT(-77.4477925769206 38.952774037953) -ITO, Hilo Int'l, 6, mid, POINT(-155.039629733435 19.7147976868663) -JAN, Jackson Int'l, 6, major, POINT(-90.0750986276924 32.3100600273635) -JAX, Jacksonville Int'l, 6, major, POINT(-81.6835767278311 30.491352730948) -KCH, Kuching Int'l, 6, mid, POINT(110.341837054315 1.4872079377901) -KGL, Kigali Int'l, 6, mid, POINT(30.1348768187856 -1.96365443664138) -KRK, Kraków-Balice, 6, major, POINT(19.8009772844504 50.0722630648331) -KUF, Kurumoch, 6, major, POINT(50.1472655210191 53.5083848190935) -KWL, Guilin Liangjiang Int'l, 6, major, POINT(110.04689349777 25.2176055252293) -LAO, Laoag Int'l, 6, mid, POINT(120.533876196127 18.1824180866379) -LGA, LaGuardia, 6, major, POINT(-73.8719858204814 40.7745539398858) -LGW, London Gatwick, 6, major, POINT(-0.162961639139456 51.1557567519275) -LJU, Ljubljana, 6, major, POINT(14.4548126283266 46.2305445554486) -LKO, Amausi Int'l, 6, mid, POINT(80.8841719732472 26.7639328700916) -LPG, La Plata, 6, mid, POINT(-57.895382063651 -34.9655441559234) -MAM, Gen. 
Sevando Canales, 6, mid, POINT(-97.5308217121187 25.7708412640619) -MAN, Manchester Int'l, 6, major, POINT(-2.27337159069427 53.3624896066518) -MCI, Kansas City Int'l, 6, major, POINT(-94.7159148579154 39.2978958263659) -MCT, Seeb Int'l, 6, major, POINT(58.2904804753493 23.5885704175856) -MIR, Habib Bourguiba Int'l, 6, mid, POINT(10.753368185054 35.760710442178) -MRS, Marseille Provence Airport, 6, major, POINT(5.22137917720337 43.4410600016468) -NLD, Quetzalcoatl Int'l, 6, mid, POINT(-99.5680081930063 27.4496896508316) -NNG, Nanning Wuwu Int'l, 6, major, POINT(108.168012273331 22.6120370541785) -OAX, Xoxocotlán Int'l, 6, mid, POINT(-96.7217959384975 17.0005592569745) -OGG, Kahului, 6, mid, POINT(-156.437429581353 20.8932885151112) -OKC, Will Rogers, 6, major, POINT(-97.5961177542092 35.3952774911744) -ORF, Norfolk Int'l, 6, major, POINT(-76.2044231712327 36.8982394673674) -PBI, Palm Beach Int'l, 6, major, POINT(-80.0901893383387 26.688441666433) -PBM, Pengel Int'l, 6, mid, POINT(-55.1999113892902 5.45599967797439) -PEE, Bolshesavino, 6, mid, POINT(56.0195602820297 57.9197711231691) -PEN, Penang Int'l, 6, mid, POINT(100.265786380955 5.29265627790489) -PHC, Port Harcourt Int'l, 6, mid, POINT(6.94989742723191 5.00700347673943) -PHE, Port Hedland Int'l, 6, mid, POINT(118.631797815615 -20.3781272960723) -PIR, Pierre Regional, 6, mid, POINT(-100.292641981705 44.3801534668762) -PIT, Greater Pittsburgh Int'l, 6, major, POINT(-80.2561290571918 40.4960518915285) -PPG, Pago Pago Int'l, 6, mid, POINT(-170.713307053734 -14.3290641850306) -BHX, Birmingham Int'l, 6, major, POINT(-1.73373170434452 52.4529085542838) -ROB, Roberts Int'l, 6, mid, POINT(-10.3530851867934 6.24183456554525) -RPR, Raipur, 6, mid, POINT(81.7403775915201 21.1859868561447) -SAL, El Salvador Int'l, 6, mid, POINT(-89.0572035692743 13.4447481228616) -SAN, San Diego Int'l, 6, major, POINT(-117.197511025731 32.7322645570132) -SAT, San Antonio Int'l, 6, major, POINT(-98.4719699991559 29.5266203391315) -SAV, 
Savannah Int'l, 6, major, POINT(-81.2099647750913 32.1356415522902) -SCU, Antonio Maceo, 6, mid, POINT(-75.8398877639791 19.9724288717622) -SLP, Ponciano Arriaga Int'l, 6, mid, POINT(-100.936477816267 22.2557130495903) -SMF, Sacramento Int'l, 6, major, POINT(-121.587894877723 38.6927238925554) -STI, Cibao Int'l, 6, mid, POINT(-70.6941783224468 19.4659219152888) -SVX, Koltsovo, 6, major, POINT(60.8058033432174 56.732245612046) -SYR, Syracuse Hancock Int'l, 6, major, POINT(-76.1130789991049 43.1317844943741) -TBZ, Tabriz, 6, mid, POINT(46.244713373574 38.1311107688175) -TRC, Torreon Int'l, 6, mid, POINT(-103.398787828579 25.5632164399896) -TUL, Tulsa Int'l, 6, major, POINT(-95.889882271542 36.190127565195) -TYS, Mcghee Tyson, 6, major, POINT(-83.9899378327585 35.8057448027088) -UFA, Ufa Int'l, 6, major, POINT(55.8840773411837 54.5651323578972) -UVF, Hewanorra Int'l, 6, mid, POINT(-60.9499737723461 13.7365238050489) -WDH, Windhoek Hosea Kutako Int'l, 6, mid, POINT(17.4632259028133 -22.4869531202041) -YAM, Sault Ste Marie, 6, mid, POINT(-84.5006089999717 46.4854175101926) -YDQ, Dawson Cr., 6, mid, POINT(-120.185595619101 55.7394117074557) -YEG, Edmonton Int'l, 6, major, POINT(-113.584492564406 53.3072001619183) -YHZ, Halifax Int'l, 6, major, POINT(-63.5149652501673 44.886545450101) -YKA, Kamloops, 6, mid, POINT(-120.441734763962 50.7051955184591) -YSB, Sudbury, 6, mid, POINT(-80.7957747817105 46.6227508204893) -YSJ, Saint John, 6, mid, POINT(-65.8905573681168 45.3292305955017) -YXS, Prince George, 6, mid, POINT(-122.674014743986 53.8842485751138) -YYJ, Victoria Int'l, 6, major, POINT(-123.430624539528 48.640529482179) -ZAM, Zamboanga Int'l, 6, mid, POINT(122.062432321637 6.9197577480583) -ZGC, Lanzhou Zhongchuan, 6, mid, POINT(103.615415363043 36.5078842461237) -ALB, Albany Int'l, 6, mid, POINT(-73.8093518843173 42.7456619801729) -MKE, General Mitchell Int'l, 6, major, POINT(-87.9021056250744 42.9479198729586) -ZHHH, Wang-Chia Tun Airbase, 6, [military,mid], 
POINT(114.24694737615 30.6017141196702) -SYX, Sanya Phoenix Int'l, 6, major, POINT(109.40823949108 18.3090959908593) -LXA, Lhasa Gonggar, 6, mid, POINT(90.9005610194027 29.2936936123184) -HTN, Hotan, 6, mid, POINT(79.8723005212191 37.0400363509765) -DRS, Dresden, 6, major, POINT(13.7649671440047 51.1250912428871) -NNA, Kenitra Air Base, 6, [military,major], POINT(-6.597753628116 34.2986673638223) -QNJ, Annemasse, 6, mid, POINT(6.26491085364159 46.1957283286261) -NOG, Nogales Int'l, 6, mid, POINT(-110.972721301675 31.2255371741159) -SXB, Strasbourg, 6, mid, POINT(7.62784196688924 48.5446961721759) -CGN, Cologne/Bonn, 6, major, POINT(7.12235975524539 50.8782596629471) -PUS, Kimhae Int'l, 6, major, POINT(128.948801379039 35.1702840636829) -CJU, Jeju Int'l, 6, major, POINT(126.491629401972 33.5247173150399) -SVG, Stavanger Sola, 6, major, POINT(5.6298103297218 58.8821564842185) -TRD, Trondheim Vaernes, 6, major, POINT(10.9168095241445 63.472029381717) -CMG, Corumbá Int'l, 6, mid, POINT(-57.6636078925543 -19.0141662885534) -FNC, Madeira, 6, mid, POINT(-16.7756374531213 32.6933642847489) -IGU, Foz do Iguaçu Int'l, 6, mid, POINT(-54.4885922735633 -25.5976832162102) -PVH, Gov. 
Jorge Teixeira de Oliveira Int'l, 6, mid, POINT(-63.8984625004213 -8.71442482859288) -BIO, Bilbao, 6, mid, POINT(-2.90609011679805 43.3050829811195) -PMI, Palma de Mallorca, 6, major, POINT(2.72997660200647 39.5657758586254) -TFN, Tenerife N., 6, major, POINT(-16.3463175679264 28.4875770267731) -GOT, Gothenburg, 6, major, POINT(12.2938269092573 57.6857493534879) -LLA, Lulea, 6, major, POINT(22.1230271243945 65.5490362477616) -AUH, Abu Dhabi Int'l, 6, major, POINT(54.6463293225558 24.4272271529764) -CZL, Mohamed Boudiaf Int'l, 6, mid, POINT(6.62194665181219 36.2834409441601) -ASW, Aswan Int'l, 6, mid, POINT(32.8244372462973 23.9682765441778) -RVN, Rovaniemi, 6, mid, POINT(25.8294409760452 66.5595564168509) -GEO, Cheddi Jagan Int'l, 6, mid, POINT(-58.2541191925889 6.49855290813572) -COK, Cochin Int'l, 6, major, POINT(76.3905198502024 10.1551187628118) -EDL, Eldoret Int'l, 6, mid, POINT(35.2236930658301 0.40507147546036) -ICN, Incheon Int'l, 6, major, POINT(126.450875980796 37.4492088624346) -CUL, Federal de Bachigualato Int'l, 6, mid, POINT(-107.469863792896 24.7668040390461) -ISB, Benazir Bhutto Int'l, 6, [major,military], POINT(73.1007936471882 33.6074457507526) -BRU, Brussels, 5, major, POINT(4.48464032408272 50.8972949641511) -ABV, Abuja Int'l, 5, major, POINT(7.27025993974356 9.00437659781094) -ACV, Arcata-Eureka, 5, mid, POINT(-124.107065520139 40.9719245381314) -AUS, Austin-Bergstrom Int'l, 5, major, POINT(-97.6668367646054 30.2021081920749) -AYT, Antalya, 5, major, POINT(30.8025526439415 36.9153233051868) -BFS, Belfast Int'l, 5, major, POINT(-6.21616943734958 54.6615575470103) -BGY, Orio Al Serio, 5, major, POINT(9.6989176939974 45.6654980560695) -BKI, Kota Kinabalu Int'l, 5, mid, POINT(116.051087873369 5.92289445474807) -BLR, Bengaluru Int'l, 5, major, POINT(77.7095579889575 13.2006108069609) -CBR, Canberra Int'l, 5, major, POINT(149.190760539671 -35.3071855902909) -CMH, Port Columbus Int'l, 5, major, POINT(-82.8840306426634 39.9981181922432) -CMN, Mohamed V 
Int'l, 5, major, POINT(-7.5814559902572 33.3747274815396) -DUS, Düsseldorf Int'l, 5, major, POINT(6.76494446612174 51.2781820420774) -ESB, Esenboğa Int'l, 5, major, POINT(32.9930100772014 40.1151278273234) -HLZ, Hamilton Int'l, 5, mid, POINT(175.336221432708 -37.8658411484827) -HYD, Rajiv Gandhi Int'l, 5, major, POINT(78.42953613452 17.2359831507471) -JFK, John F Kennedy Int'l, 5, major, POINT(-73.7863268609295 40.6459595584081) -KBP, Boryspil Int'l, 5, major, POINT(30.8951621615528 50.340902338877) -KRT, Khartoum, 5, major, POINT(32.550153296633 15.5922226530858) -MSN, Dane Cty. Reg. (Truax Field), 5, major, POINT(-89.3457847894487 43.1363082385868) -MSQ, Minsk Int'l, 5, major, POINT(28.0341933346378 53.8893792398005) -PMO, Palermo, 5, major, POINT(13.1055309888638 38.1863351084895) -PVD, T.F. Green, 5, mid, POINT(-71.4357841445789 41.7260019847189) -RSW, Southwest Florida Int'l, 5, major, POINT(-81.7551231409306 26.5279288067651) -SHE, Shenyang Taoxian Int'l, 5, major, POINT(123.487974430338 41.6347891339582) -SHJ, Sharjah Int'l, 5, major, POINT(55.5205071948853 25.3211964019068) -SJC, San Jose Int'l, 5, major, POINT(-121.929428983532 37.3694905908965) -SNA, John Wayne, 5, major, POINT(-117.861489220393 33.6794857329549) -STR, Stuttgart, 5, major, POINT(9.19395108945536 48.6901051358913) -SYQ, Nacional Tobías Bolaños, 5, mid, POINT(-84.1386091971594 9.95827851919623) -SZX, Shenzhen Bao'an Int'l, 5, major, POINT(113.815852751085 22.6465077147868) -SDF, Louisville Int'l, 5, major, POINT(-85.7417027597367 38.1860207152699) -GVA, Geneva, 5, major, POINT(6.10794577423603 46.231009510158) -LYS, Lyon-Saint Exupery, 5, mid, POINT(5.07594431813459 45.7210186834669) -KIX, Kansai Int'l, 5, major, POINT(135.244459772476 34.4347941629269) -LIS, Lisbon Portela, 5, major, POINT(-9.13069440931071 38.7707623427514) -CNF, Tancredo Neves Int'l, 5, major, POINT(-43.9635815209949 -19.6327821218747) -BMA, Bromma, 5, mid, POINT(17.9456175406145 59.3555902065112) -SUB, Juanda Int'l, 5, 
major, POINT(112.777034594933 -7.383578985276) -MDQ, Astor Piazzolla Int'l, 5, mid, POINT(-57.5816150932392 -37.9332161204482) -GCM, Owen Roberts Int'l, 5, major, POINT(-81.3576706162289 19.2959107437122) -CGO, Zhengzhou Xinzheng Int'l, 5, major, POINT(113.841831302845 34.5263027198957) -DLC, Dalian Zhoushuizi Int'l, 5, major, POINT(121.538913780101 38.9615702300222) -HER, Heraklion Int'l, 5, major, POINT(25.1740558243272 35.3369024101045) -TBS, Tbilisi Int'l, 5, major, POINT(44.9646146141664 41.6694420187261) -XXC, Cascais, 5, mid, POINT(-9.35458240263928 38.7235353208323) -KHH, Kaohsiung Int'l, 4, major, POINT(120.345156342151 22.5717061054422) -SKO, Sadiq Abubakar III, 4, mid, POINT(5.20022616032651 12.9174824166181) -UIO, Mariscal Sucre Int'l, 4, mid, POINT(-78.4899925545701 -0.145552408466882) -KHI, Karachi Civil, 4, mid, POINT(67.1521283592947 24.8985243689595) -KIV, Kishinev S.E., 4, mid, POINT(28.9360487562255 46.9341619900391) -LIM, Jorge Chávez, 4, major, POINT(-77.1075656931342 -12.0237161502221) -YQT, Thunder Bay Int'l, 4, mid, POINT(-89.3121421238136 48.3718811492508) -VNO, Vilnius, 4, major, POINT(25.2807164497285 54.6430549307542) -XIY, Hsien Yang, 4, major, POINT(108.755811342151 34.4429391054422) -NTR, Del Norte Int'l, 4, mid, POINT(-100.238394186577 25.859873767729) -TBU, Fua'amotu Int'l, 4, mid, POINT(-175.135635 -21.24861) -IFN, Esfahan Int'l, 4, mid, POINT(51.8763916812681 32.7460805344321) -HRE, Harare Int'l, 4, mid, POINT(31.1014 -17.9228) -KWI, Kuwait Int'l, 4, major, POINT(47.9714825593316 29.2396800581583) -YOW, Macdonald-Cartier Int'l, 4, major, POINT(-75.6648933870205 45.3201348196531) -KBL, Kabul Int'l, 4, mid, POINT(69.2100736270874 34.5633978864149) -ABJ, Abidjan Port Bouet, 4, mid, POINT(-3.93221929167636 5.2543984451492) -ACA, General Juan N Alvarez Int'l, 4, major, POINT(-99.7545085619681 16.76196735278) -ACC, Kotoka Int'l, 4, major, POINT(-0.171402855660817 5.60698152381193) -ADD, Bole Int'l, 4, mid, POINT(38.7931904366343 
8.98173027581099) -ADE, Aden Int'l, 4, mid, POINT(45.030602 12.8278) -ADL, Adelaide Int'l, 4, mid, POINT(138.532101457699 -34.9405860275154) -ALA, Almaty Int'l, 4, major, POINT(77.0120458771175 43.3464943144793) -ALG, Houari Boumediene, 4, major, POINT(3.21207353516506 36.6997206663535) -ALP, Aleppo Int'l, 4, major, POINT(37.2273414057828 36.1846237314314) -AMD, Sardar Vallabhbhai Patel Int'l, 4, mid, POINT(72.6209000884332 23.0707454635881) -ANF, Cerro Moreno Int'l, 4, mid, POINT(-70.4409908509407 -23.4489545248317) -ASB, Ashkhabad Northwest, 4, mid, POINT(58.3639659208246 37.984853438957) -ASM, Yohannes Iv Int'l, 4, mid, POINT(38.9063540136321 15.2936159696499) -ASU, Silvio Pettirossi Int'l, 4, mid, POINT(-57.5139078247136 -25.2416592533816) -BDA, Bermuda Int'l, 4, mid, POINT(-64.7027740686514 32.3591739601581) -BEG, Surcin, 4, major, POINT(20.2912845946621 44.8190766654433) -BEY, Beirut Int'l, 4, major, POINT(35.4930853618161 33.8254400618668) -BHO, Bairagarh, 4, mid, POINT(77.3408714713579 23.2855684869809) -BKO, Bamako Sénou, 4, mid, POINT(-7.94727226970801 12.5393363425867) -BNA, Nashville Int'l, 4, major, POINT(-86.6692867356375 36.1314876361697) -BNE, Brisbane Int'l, 4, major, POINT(153.120256418844 -27.3853965939099) -BOI, Boise Air Terminal, 4, major, POINT(-116.221841070549 43.5689592234704) -BRW, Wiley Post Will Rogers Mem., 4, mid, POINT(-156.771835 71.289299) -BUF, Greater Buffalo Int'l, 4, major, POINT(-78.7319965523308 42.9340337493526) -BUQ, Bulawayo, 4, mid, POINT(28.622552042904 -20.0155684094908) -BWN, Brunei Int'l, 4, major, POINT(114.933119029209 4.94547528227685) -CAN, Guangzhou Baiyun Int'l, 4, major, POINT(113.297516552171 23.3891511573243) -CCP, Carriel Sur Int'l, 4, mid, POINT(-73.0621061746214 -36.7763727437881) -CCU, Netaji Subhash Chandra Bose Int'l, 4, major, POINT(88.4400010130197 22.6453893785064) -CGP, Chittagong, 4, mid, POINT(91.8147107162383 22.2455658585738) -CHC, Christchurch Int'l, 4, major, POINT(172.538675565223 
-43.4885486784104) -CKY, Conakry, 4, mid, POINT(-13.6210656251671 9.57418115850082) -CLE, Hopkins Int'l, 4, major, POINT(-81.8384406064046 41.4111916124966) -CLO, Alfonso Bonilla Aragón Int'l, 4, mid, POINT(-76.3850714728091 3.54328635123219) -COO, Cotonou Cadjehon, 4, mid, POINT(2.3838000724352 6.3582465034691) -COR, Ingeniero Ambrosio L.V. Taravella Int'l, 4, mid, POINT(-64.2123157670801 -31.3156811684889) -CTG, Rafael Nunez, 4, mid, POINT(-75.5123349559682 10.4449381764915) -CUN, Cancún, 4, major, POINT(-86.8744172506694 21.04019667144) -CUU, General R F Villalobos Int'l, 4, mid, POINT(-105.969204692629 28.7039984997679) -DAC, Zia Int'l Dhaka, 4, mid, POINT(90.4049241599237 23.8481243218127) -DRW, Darwin Int'l, 4, [major,military], POINT(130.877501436774 -12.4080559966556) -DUR, Louis Botha, 4, mid, POINT(30.9457081940881 -29.965914250828) -FBM, Lubumbashi Luano Int'l, 4, mid, POINT(27.5292 -11.5908) -FEZ, Saiss, 4, mid, POINT(-4.98214637678303 33.9305251844673) -FIH, Kinshasa N Djili Int'l, 4, mid, POINT(15.4465162074561 -4.38916882197582) -FNA, Freetown Lungi, 4, mid, POINT(-13.2002296786483 8.61542361726369) -FNJ, Sunan, 4, mid, POINT(125.675321571201 39.2001771667656) -FRU, Vasilyevka, 4, major, POINT(74.468800339909 43.0554527233303) -GBE, Sir Seretse Khama Int'l, 4, mid, POINT(25.9243808264147 -24.5580718089441) -GDL, Don Miguel Hidalgo Int'l, 4, major, POINT(-103.300766222752 20.5246863485173) -GLA, Glasgow Int'l, 4, major, POINT(-4.43167796995107 55.8641828570355) -GUA, La Aurora, 4, mid, POINT(-90.530181111378 14.5881608290051) -GYE, Simon Bolivar Int'l, 4, mid, POINT(-79.887009643933 -2.15833790699136) -HAN, Noi Bai, 4, major, POINT(105.803759436806 21.2145596707245) -HAV, José Martí Int'l, 4, major, POINT(-82.4074206289499 22.9973533364428) -HBE, Borg El Arab Int'l, 4, mid, POINT(29.69266601523 30.9183712786239) -JED, King Abdul Aziz Int'l, 4, major, POINT(39.1504996780448 21.6706857878128) -KAN, Kano Mallam Aminu Int'l, 4, mid, POINT(8.52213718395767 
12.0457071601746) -KHG, Kashi, 4, mid, POINT(76.0130148060075 39.5379686306258) -KIN, Norman Manley Int'l, 4, major, POINT(-76.7786897616576 17.9375751552752) -KTM, Tribhuvan Int'l, 4, mid, POINT(85.357139531668 27.7002816751609) -LAD, Luanda 4 de Fevereiro, 4, mid, POINT(13.2347957502699 -8.84831327917379) -LED, Pulkovo 2, 4, major, POINT(30.3070976454648 59.8054061601897) -LHE, Allama Iqbal Int'l, 4, mid, POINT(74.4108810181748 31.5206296518206) -LLW, Kamuzu Int'l, 4, mid, POINT(33.7827885019788 -13.788622823746) -LOS, Lagos Murtala Muhammed, 4, major, POINT(3.32112435281334 6.57825944540467) -LPB, El Alto Int'l, 4, mid, POINT(-68.1780055277945 -16.5098792213977) -LUN, Lusaka Int'l, 4, mid, POINT(28.4455443211019 -15.3268522509447) -LXR, Luxor, 4, mid, POINT(32.7032970848623 25.6730347786023) -MAA, Chennai Int'l, 4, major, POINT(80.1637759731545 12.9825301669154) -MAR, La Chinita Int'l, 4, mid, POINT(-71.7237688094687 10.5557594684972) -MDE, José María Córdova, 4, mid, POINT(-75.4269557399772 6.171001614358) -MEM, Memphis Int'l, 4, major, POINT(-89.9816280353237 35.0444101240089) -MGA, Augusto Cesar Sandino Int'l, 4, mid, POINT(-86.1712846229543 12.144635873435) -MHD, Mashhad, 4, major, POINT(59.6421943574029 36.2275503134984) -MIA, Miami Int'l, 4, major, POINT(-80.2789718277441 25.7949407212406) -MID, Lic M Crecencio Rejon Int'l, 4, mid, POINT(-89.6630235736434 20.9338603864296) -MLA, Luqa, 4, major, POINT(14.4952644555055 35.8489307943501) -MBA, Moi Int'l, 4, major, POINT(39.6026631870383 -4.03265262579657) -MSU, Moshoeshoe I Int'l, 4, mid, POINT(27.5592160333614 -29.4555740046101) -MSY, New Orleans Int'l, 4, major, POINT(-90.2566939480594 29.9851141460622) -MUX, Multan, 4, [major,military], POINT(71.418995432932 30.1950780904965) -MVD, Carrasco Int'l, 4, major, POINT(-56.026636146282 -34.8410485988569) -MZT, General Rafael Buelna Int'l, 4, mid, POINT(-106.270016617885 23.1665960971344) -NAS, Nassau Int'l, 4, major, POINT(-77.4648472290944 25.0486910600866) 
-NDJ, Ndjamena, 4, mid, POINT(15.0330446385559 12.1295400184115) -NIM, Niamey, 4, mid, POINT(2.17730671184125 13.4767572807942) -CEB, Mactan-Cebu Int'l, 4, major, POINT(123.979134508664 10.3158756727292) -NOV, Nova Lisboa, 4, mid, POINT(15.7497618459595 -12.8025414575915) -OMA, Eppley Airfield, 4, mid, POINT(-95.8994157953121 41.2997111453012) -OME, Nome, 4, mid, POINT(-165.441641712281 64.5072207026631) -OUA, Ouagadougou, 4, mid, POINT(-1.51380536165114 12.3535800260473) -PAP, Mais Gate Int'l, 4, mid, POINT(-72.2944780260473 18.5756829054286) -PBC, Puebla, 4, mid, POINT(-98.375759790423 19.163793546584) -PDX, Portland Int'l, 4, major, POINT(-122.592738881254 45.5889569315305) -PER, Perth Int'l, 4, major, POINT(115.974224942233 -31.9411297945783) -PLZ, H F Verwoerd, 4, mid, POINT(25.6117777567602 -33.9840877431374) -PMC, El Tepual Int'l, 4, mid, POINT(-73.0983841336424 -41.4333820702269) -PNH, Pochentong, 4, major, POINT(104.845027612457 11.5526449176513) -PNQ, Pune, 4, [major,military], POINT(73.9089838110016 18.5791766115328) -POM, Port Moresby Int'l, 4, major, POINT(147.211250855977 -9.43865269316142) -PTY, Tocumen Int'l, 4, major, POINT(-79.3871348215438 9.06687242265839) -PUQ, Carlos Ibáñez de Campo Int'l, 4, mid, POINT(-70.8431237851324 -53.0050698255177) -RDU, Durham Int'l, 4, major, POINT(-78.7913814006751 35.8752323452255) -RGN, Mingaladon, 4, major, POINT(96.1341946114947 16.9011542818251) -RIX, Riga, 4, major, POINT(23.9793791116995 56.9220038786097) -SAH, Sanaa Int'l, 4, mid, POINT(44.2246467902561 15.4739027755737) -SDA, Baghdad Int'l, 4, major, POINT(44.2289125352942 33.268162986377) -SDQ, De Las Américas Int'l, 4, major, POINT(-69.6764726754667 18.4302196948173) -SGN, Tan Son Nhat, 4, major, POINT(106.664246141375 10.8163005571879) -SKG, Thessaloniki, 4, major, POINT(22.9764353610613 40.5238736887775) -SOF, Vrazhdebna, 4, major, POINT(23.4024521357708 42.6891841273195) -STV, Surat, 4, major, POINT(72.7424384372589 21.1204503297172) -SUV, Nausori 
Int'l, 4, mid, POINT(178.560048369959 -18.0458996922854) -SYZ, Shiraz Int'l, 4, major, POINT(52.5897712745211 29.5458013842874) -TAM, Gen Francisco J Mina Int'l, 4, mid, POINT(-97.8698137568394 22.2893319525064) -TGU, Toncontin Int'l, 4, mid, POINT(-87.2192116348986 14.0599852192071) -THR, Mehrabad Int'l, 4, major, POINT(51.3208069717572 35.6913743304946) -TIA, Tirane Rinas, 4, major, POINT(19.7150324049722 41.4208514680567) -TIJ, General Abelardo L Rodriguez Int'l, 4, major, POINT(-116.975476095598 32.5460499135013) -TLC, Jose Maria Morelos Y Pavon, 4, mid, POINT(-99.5706494463542 19.3386880423032) -TLL, Ulemiste, 4, major, POINT(24.798964869983 59.4165014697451) -TLV, Ben Gurion, 4, major, POINT(34.8708499180995 32.0007468501844) -TMS, São Tomé Salazar, 4, mid, POINT(6.71282193005667 0.374744213699427) -TNR, Antananarivo Ivato, 4, mid, POINT(47.4753540009579 -18.7993348763082) -TPA, Tampa Int'l, 4, major, POINT(-82.534824252055 27.9800400852184) -VLN, Zim Valencia, 4, mid, POINT(-67.9223617121873 10.1540056883979) -VOG, Gumrak, 4, mid, POINT(44.354767968489 48.7916764657611) -VTE, Vientiane, 4, mid, POINT(102.568238195728 17.9754595948321) -VVI, Viru Viru Int'l, 4, mid, POINT(-63.1403888218213 -17.6479468257839) -WLG, Wellington Int'l, 4, major, POINT(174.811665268238 -41.3289891844659) -YPR, Prince Rupert, 4, mid, POINT(-130.445587 54.292) -YQG, Windsor, 4, mid, POINT(-82.9600877389448 42.2658784727198) -YQR, Regina, 4, mid, POINT(-104.655433975371 50.4332192867183) -YVR, Vancouver Int'l, 4, major, POINT(-123.180867003812 49.1935590395715) -YWG, Winnipeg Int'l, 4, major, POINT(-97.2267694809585 49.9033302471671) -YXE, John G Diefenbaker Int'l, 4, mid, POINT(-106.690181967554 52.1701439447381) -YXY, Whitehorse Int'l, 4, mid, POINT(-135.076210089402 60.7141521481397) -YYC, Calgary Int'l, 4, major, POINT(-114.010560500236 51.1308572567549) -YYG, Charlottetown, 4, mid, POINT(-63.1312341333234 46.2858131367525) -YYQ, Churchill, 4, mid, POINT(-94.0813639506318 
58.7497237849788) -YYT, St John's Int'l, 4, mid, POINT(-52.7433337428638 47.6131179007955) -YZF, Yellowknife, 4, mid, POINT(-114.437846335049 62.4707373610202) -ZAG, Zagreb, 4, major, POINT(16.0615138009014 45.7333266730984) -ZNZ, Zanzibar, 4, mid, POINT(39.2223319841558 -6.21857034620282) -REK, Reykjavik Air Terminal, 4, mid, POINT(-21.9466344031327 64.1318728609901) -ARH, Arkhangelsk-Talagi, 4, mid, POINT(40.7133465694594 64.5967437730455) -KZN, Kazan Int'l, 4, major, POINT(49.2984458036407 55.6080601429764) -ORY, Paris Orly, 4, major, POINT(2.36737912783773 48.7313030458052) -YQB, Québec, 4, major, POINT(-71.3839280711731 46.7915684363308) -YUL, Montréal-Trudeau, 4, major, POINT(-73.7493162650417 45.4583512294531) -NRT, Narita Int'l, 4, major, POINT(140.384401709179 35.7640560727828) -NGO, Chubu Centrair Int'l, 4, major, POINT(136.814771286824 34.8590296958162) -OKD, Okadama, 4, mid, POINT(141.382100450075 43.1106495990978) -BGO, Bergen Flesland, 4, major, POINT(5.22725311562336 60.2890610502966) -TOS, Tromsø Langnes, 4, major, POINT(18.9072624292132 69.6796790473478) -BEL, Val de Caes Int'l, 4, mid, POINT(-48.4795602893793 -1.38974628795546) -CGR, Campo Grande Int'l, 4, mid, POINT(-54.6689498781305 -20.4572717360311) -CWB, Afonso Pena Int'l, 4, mid, POINT(-49.1737093663469 -25.5360001430558) -FOR, Pinto Martins Int'l, 4, mid, POINT(-38.5407472498334 -3.77859496233091) -GRU, São Paulo-Guarulhos Int'l, 4, major, POINT(-46.481753608842 -23.4261155770421) -GYN, Santa Genoveva, 4, mid, POINT(-49.2266464905994 -16.6323665721637) -POA, Salgado Filho Int'l, 4, mid, POINT(-51.1770409488172 -29.9901930170609) -REC, Gilberto Freyre Int'l, 4, mid, POINT(-34.9182667174851 -8.13162553076239) -SSA, Deputado Luis Eduardo Magalhaes Int'l, 4, mid, POINT(-38.3347989911732 -12.9143614970326) -MDZ, El Plumerillo, 4, mid, POINT(-68.7984838394473 -32.8278001692719) -MAO, Eduardo Gomes Int'l, 4, mid, POINT(-60.0460645898854 -3.0321390062591) -NSI, Yaoundé Nsimalen Int'l, 4, mid, 
POINT(11.5479941396807 3.71484520708126) -PVG, Shanghai Pudong Int'l, 4, major, POINT(121.801518760578 31.1523090295533) -ADJ, Marka Int'l, 4, mid, POINT(35.9841052362449 31.9741994015442) -MLE, Male Int'l, 4, major, POINT(73.5273902836844 4.18870090323372) -VER, Gen. Heriberto Jara Int'l, 4, mid, POINT(-96.1835702143695 19.1424237025017) -OXB, Osvaldo Vieira Int'l, 4, mid, POINT(-15.651185561666 11.8889231454855) -DVO, Francisco Bangoy Int'l, 4, major, POINT(125.645066609434 7.13053746163073) -SEZ, Seychelles Int'l, 4, mid, POINT(55.5115519246793 -4.67106914178521) -DKR, Léopold Sedar Senghor Int'l, 4, major, POINT(-17.490407907719 14.7456306146748) -PZU, Port Sudan New Int'l, 4, mid, POINT(37.2387475981025 19.4341052385231) -TAS, Tashkent Int'l, 4, major, POINT(69.2666137241129 41.2622338767383) -CPH, Copenhagen, 3, major, POINT(12.6493508684508 55.6285017221528) -BBU, Aeroportul National Bucuresti-Baneasa, 3, mid, POINT(26.0857251587764 44.497041455972) -BUD, Ferihegy, 3, major, POINT(19.2622301677881 47.433274269248) -CKG, Chongqing Jiangbei Int'l, 3, major, POINT(106.638019704811 29.7240422241688) -CLT, Douglas Int'l, 3, major, POINT(-80.9439277342763 35.2204281685597) -DTW, Detroit Metro, 3, major, POINT(-83.3478935065615 42.2257204508004) -DUB, Dublin, 3, major, POINT(-6.24388491037139 53.42700828497) -FAI, Fairbanks Int'l, 3, major, POINT(-147.865721120795 64.8180981117369) -HAM, Hamburg, 3, major, POINT(10.005647830925 53.6320011640866) -KUL, Kuala Lumpur Int'l, 3, major, POINT(101.713886325743 2.74751295791811) -LAS, Mccarran Int'l, 3, major, POINT(-115.151323951283 36.0849602383367) -MCO, Orlando Int'l, 3, major, POINT(-81.3073713307985 28.4311506791138) -MSP, Minneapolis St. 
Paul Int'l, 3, major, POINT(-93.2081003718301 44.8820263631968) -MUC, Franz-Josef-Strauss, 3, major, POINT(11.7880627192437 48.3538373961608) -PHL, Philadelphia Int'l, 3, major, POINT(-75.2429857676998 39.876087236427) -PHX, Sky Harbor Int'l, 3, major, POINT(-112.01363529773 33.4358607639498) -SLC, Salt Lake City Int'l, 3, major, POINT(-111.981984879993 40.7867290053708) -STL, Lambert St Louis Int'l, 3, major, POINT(-90.3659545350675 38.7427163155204) -WAW, Okecie Int'l, 3, major, POINT(20.9727263383587 52.171026749259) -ZRH, Zurich Int'l, 3, major, POINT(8.56221279534765 47.4523895064915) -CRL, Gosselies, 3, mid, POINT(4.4543736298165 50.4571296549567) -MUCf, Munich Freight Terminal, 3, major, POINT(11.7694828593654 48.3497964078377) -BCN, Barcelona, 3, major, POINT(2.07800334981292 41.3031552797463) -PRG, Ruzyn, 3, major, POINT(14.2674849854076 50.1076511703671) -HKG, Hong Kong Int'l, 2, major, POINT(113.935016387376 22.3153328280868) -TPE, Taoyuan, 2, major, POINT(121.231370453323 25.0767411043346) -AMS, Schiphol, 2, major, POINT(4.76437693232812 52.3089323889822) -SIN, Singapore Changi, 2, major, POINT(103.986413880993 1.35616083528126) -LHR, London Heathrow, 2, major, POINT(-0.453156652063309 51.4709958799938) -AKL, Auckland Int'l, 2, major, POINT(174.791719433715 -37.0063551142815) -ANC, Anchorage Int'l, 2, major, POINT(-149.981725100633 61.1728936745367) -ATL, Hartsfield-Jackson Atlanta Int'l, 2, major, POINT(-84.4253974336047 33.6405290807352) -PEK, Beijing Capital, 2, major, POINT(116.588174004661 40.078766336331) -BOG, Eldorado Int'l, 2, major, POINT(-74.1433718001028 4.69883276192097) -BOM, Chhatrapati Shivaji Int'l, 2, major, POINT(72.8745639500051 19.0951019488402) -BOS, Gen E L Logan Int'l, 2, major, POINT(-71.0164066172958 42.3665658198506) -BWI, Baltimore-Washington Int'l Thurgood Marshall,2, major, POINT(-76.6686428352448 39.1793943583568) -CAI, Cairo Int'l, 2, major, POINT(31.3997430067114 30.1119904385575) -CAS, Casablanca-Anfa, 2, mid, 
POINT(-7.66321880771143 33.5627883851079) -CCS, Simón Bolivar Int'l, 2, mid, POINT(-67.0057488076316 10.5973549146064) -CPT, Cape Town Int'l, 2, major, POINT(18.5976565083138 -33.9704466120395) -CTU, Chengdushuang Liu, 2, major, POINT(103.956136481695 30.5810712647464) -DEL, Indira Gandhi Int'l, 2, major, POINT(77.0878362565332 28.5592039760586) -DEN, Denver Int'l, 2, major, POINT(-104.673797338542 39.8494613881509) -DFW, Dallas-Ft. Worth Int'l, 2, major, POINT(-97.0403710741144 32.9001505594816) -DMK, Don Muang Int'l, 2, major, POINT(100.602578626505 13.9202766010347) -DXB, Dubai Int'l, 2, major, POINT(55.3540769172243 25.2525655938182) -EWR, Newark Int'l, 2, major, POINT(-74.1771472796706 40.6904798278929) -EZE, Ministro Pistarini Int'l, 2, major, POINT(-58.5412456939382 -34.8136469380323) -FLL, Fort Lauderdale Hollywood Int'l, 2, major, POINT(-80.1452588465189 26.0717095746827) -IAH, George Bush Intercontinental, 2, major, POINT(-95.3337047912947 29.9865909034907) -IST, Atatürk Hava Limani, 2, major, POINT(28.8195493087893 40.9778388177797) -JNB, OR Tambo Int'l, 2, major, POINT(28.2319885648741 -26.1320953994887) -JNU, Juneau Int'l, 2, mid, POINT(-134.583573037872 58.3589441045951) -LAX, Los Angeles Int'l, 2, major, POINT(-118.402468548522 33.9441742543586) -LIN, Linate, 2, major, POINT(9.27996629691061 45.4603938456252) -MEL, Melbourne Int'l, 2, major, POINT(144.848998091131 -37.6699411967893) -MEX, Lic Benito Juarez Int'l, 2, major, POINT(-99.0826079514239 19.4354695720494) -MNL, Ninoy Aquino Int'l, 2, major, POINT(121.004122083437 14.5068323762967) -NBO, Jomo Kenyatta Int'l, 2, major, POINT(36.9250887490365 -1.33052964350634) -HNL, Honolulu Int'l, 2, major, POINT(-157.919783173755 21.332022315024) -ORD, Chicago O'Hare Int'l, 2, major, POINT(-87.90513439065 41.9765291023803) -RUH, King Khalid Int'l, 2, major, POINT(46.701829023464 24.9590317436512) -SCL, Arturo Merino Benitez Int'l, 2, major, POINT(-70.7936860162974 -33.3968336342597) -SEA, Tacoma Int'l, 2, 
major, POINT(-122.302289722924 47.4435819127259) -SFO, San Francisco Int'l, 2, major, POINT(-122.383470344449 37.6170250868053) -SHA, Hongqiao, 2, major, POINT(121.341183788567 31.1872574314078) -SVO, Sheremtyevo, 2, major, POINT(37.4159690348414 55.966447172512) -YYZ, Toronto-Pearson Int'l, 2, major, POINT(-79.6114193247449 43.6809595186356) -SYD, Kingsford Smith, 2, major, POINT(151.166067305601 -33.9365832057717) -HEL, Helsinki Vantaa, 2, major, POINT(24.9682078665914 60.3187158912982) -CDG, Charles de Gaulle Int'l, 2, major, POINT(2.54186776739457 49.0144200969386) -TXL, Berlin-Tegel Int'l, 2, major, POINT(13.2903090925074 52.5544287044101) -VIE, Vienna Schwechat Int'l, 2, major, POINT(16.5607679642129 48.1197563052538) -FRA, Frankfurt Int'l, 2, major, POINT(8.57182286907608 50.0506770895207) -FCO, Leonardo da Vinci Int'l, 2, major, POINT(12.2501008973638 41.7950786307394) -ITM, Osaka Int'l, 2, major, POINT(135.442475256249 34.7901980848749) -GMP, Gimpo Int'l, 2, major, POINT(126.802392860276 37.5573005399508) -OSL, Oslo Gardermoen, 2, major, POINT(11.0991032762581 60.1935783171386) -BSB, Juscelino Kubitschek Int'l, 2, major, POINT(-47.9207885133625 -15.8699985002824) -CGH, Congonhas Int'l, 2, major, POINT(-46.6591155302196 -23.62685882701) -GIG, Rio de Janeiro-Antonio Carlos Jobim Int'l, 2, major, POINT(-43.2483813790683 -22.8123437125006) -MAD, Madrid Barajas, 2, major, POINT(-3.56902665458863 40.4681282733923) -SJU, Luis Muñoz Marin, 2, major, POINT(-66.0042299757548 18.4380770734949) -ARN, Arlanda, 2, major, POINT(17.9307299016916 59.6511203397372) -CGK, Soekarno-Hatta Int'l, 2, major, POINT(106.654296151172 -6.1266029559729) -ATH, Eleftherios Venizelos Int'l, 2, major, POINT(23.9471160554073 37.9362331299254) +abbrev:keyword,name:text, scalerank:integer,type:keyword, location:geo_point, country:keyword, city:keyword, city_location:geo_point +LUH, Sahnewal, 9, small, POINT(75.9570722403652 30.8503598561702), India, Ludhiāna, POINT(75.85 30.91) +SSE, 
Solapur, 9, mid, POINT(75.9330597710755 17.625415183635), India, Solāpur, POINT(75.92 17.68) +IXR, Birsa Munda, 9, mid, POINT(85.3235970368767 23.3177245989962), India, Rānchi, POINT(85.33 23.36) +AWZ, Ahwaz, 9, mid, POINT(48.7471065435931 31.3431585560757), Iran, Ahvāz, POINT(48.6692 31.3203) +GWL, Gwalior, 9, [mid,military], POINT(78.2172186546348 26.285487697937), India, Gwalior, POINT(78.178 26.2215) +HOD, Hodeidah Int'l, 9, mid, POINT(42.97109630194 14.7552534413725), Yemen, Al Ḩudaydah, POINT(42.9511 14.8022) +IDR, Devi Ahilyabai Holkar Int'l, 9, mid, POINT(75.8092915005895 22.727749187571), India, Indore, POINT(75.8472 22.7167) +ISK, Gandhinagar, 9, mid, POINT(73.8105674924689 19.9660205672806), India, Nāsik, POINT(73.78 20.0) +IXC, Chandigarh Int'l, 9, [major,military], POINT(76.8017261105242 30.6707248949667), India, Chandīgarh, POINT(76.78 30.75) +IXU, Aurangabad, 9, mid, POINT(75.3958432922005 19.8672969621082), India, Jālna, POINT(75.8864 19.841) +LYP, Faisalabad Int'l, 9, [mid,military], POINT(72.9878190922305 31.3627435480862), Pakistan, Faisalabad, POINT(73.0911 31.4167) +OMS, Omsk Tsentralny, 9, mid, POINT(73.3163595376585 54.9576482934059), Russia, Omsk, POINT(73.3833 54.9667) +OVB, Novosibirsk Tolmachev, 9, mid, POINT(82.6671524525865 55.0095847136264), Russia, Novosibirsk, POINT(82.9167 55.0333) +OZH, Zaporozhye Int'l, 9, [mid,military], POINT(35.3018728575279 47.8732635579023), Ukraine, Zaporizhzhia, POINT(35.1175 47.85) +PKU, Simpang Tiga, 9, mid, POINT(101.446569298441 0.464600872998505), Indonesia, Pekanbaru, POINT(101.4453 0.5092) +ROP, Rota Int'l, 9, mid, POINT(145.243980298582 14.1717712971216), Northern Mariana Islands, Capitol Hill, POINT(145.7546 15.2137) +SGC, Surgut, 9, mid, POINT(73.4084964764375 61.3401672194481), Russia, Surgut, POINT(73.4333 61.25) +TRZ, Tiruchirappalli, 9, mid, POINT(78.7089578747476 10.7603571306554), India, Trichinopoly, POINT(78.7047 10.7903) +TUK, Turbat Int'l, 9, mid, POINT(63.0279333519181 25.988794590011), 
Pakistan, Turbat, POINT(63.0544 26.0031) +UET, Quetta Int'l, 9, mid, POINT(66.9487311480949 30.249043186181), Pakistan, Quetta, POINT(67.0 30.1833) +ZAH, Zahedan Int'l, 9, mid, POINT(60.900708564915 29.4752941956573), Iran, Zāhedān, POINT(60.8628 29.4964) +MLG, Abdul Rachman Saleh, 9, [mid,military], POINT(112.711418617258 -7.92998002840567), Indonesia, Malang, POINT(112.62 -7.98) +BAX, Barnaul, 9, mid, POINT(83.5504532124038 53.3633850813046), Russia, Barnaul, POINT(83.75 53.3333) +VIAX, Adampur, 9, [military,mid], POINT(75.7584828456005 31.4329422397715), India, Jalandhar, POINT(75.4432 31.2569) +VIBY, Bareilly, 9, military, POINT(79.452002687657 28.4218087161144), India, Bareilly, POINT(79.415 28.364) +OPQS, Dhamial, 9, small, POINT(73.0320498392002 33.5614146278861), Pakistan, Rawalpindi, POINT(73.0333 33.6) +CJJ, Cheongju Int'l, 9, major, POINT(127.495916124681 36.7220227766673), South Korea, Cheongju, POINT(127.4833 36.6333) +KWJ, Gwangju, 9, [mid,military], POINT(126.810839481226 35.1400051390198), South Korea, Naju, POINT(126.7167 35.0333) +TAE, Daegu Int'l, 9, mid, POINT(128.637537699933 35.8999277969087), South Korea, Daegu, POINT(128.6017 35.8717) +USN, Ulsan, 9, mid, POINT(129.355731047528 35.5928957527107), South Korea, Ulsan, POINT(129.3167 35.55) +WIIT, Radin Inten II, 9, mid, POINT(105.176060419161 -5.242566777132), Indonesia, Bandar Lampung, POINT(105.2667 -5.45) +IXD, Allahabad, 9, military, POINT(81.7317271462187 25.443522027821), India, Prayagraj, POINT(81.8464 25.4358) +CEK, Chelyabinsk, 9, mid, POINT(61.5122589740201 55.2977919496055), Russia, Chelyabinsk, POINT(61.4 55.15) +TNN, Tainan, 8, [military,mid], POINT(120.209733318093 22.950667918347), Taiwan, Tainan, POINT(120.1833 22.9833) +RMQ, Taichung, 8, [military,mid], POINT(120.630703547584 24.2666555567115), Taiwan, Taichung, POINT(120.6794 24.1439) +RTM, Rotterdam The Hague, 8, mid, POINT(4.43384434962876 51.9491301899382), Netherlands, The Hague, POINT(4.31 52.08) +VOZ, 
Voronezh-Chertovitskoye, 8, mid, POINT(39.2254496447973 51.8126171268344), Russia, Voronezh, POINT(39.2106 51.6717) +LPL, Liverpool John Lennon, 8, major, POINT(-2.85862065784938 53.3363751054422), United Kingdom, Liverpool, POINT(-2.9919 53.4075) +VTZ, Vishakapatnam, 8, mid, POINT(83.2235216387465 17.7279577384364), India, Vishākhapatnam, POINT(83.2978 17.7042) +UPG, Sultan Hasanuddin Int'l, 8, major, POINT(119.545691342151 -5.05893689455779), Indonesia, Makassar, POINT(119.4362 -5.1619) +VAV, Vava'u Int'l, 8, mid, POINT(-173.968093944159 -18.5860058550654), Tonga, Neiafu, POINT(-173.9831 -18.6508) +NCL, Newcastle Int'l, 8, major, POINT(-1.71034578407216 55.037084860802), United Kingdom, Gateshead, POINT(-1.6 54.95) +LCE, Goloson Int'l, 8, mid, POINT(-86.8514685020011 15.7451596659126), Honduras, La Ceiba, POINT(-86.7931 15.7792) +MED, Madinah Int'l, 8, major, POINT(39.6991359560417 24.5442339605661), Saudi Arabia, Badr Ḩunayn, POINT(38.7906 23.78) +YMX, Mirabel Int'l, 8, mid, POINT(-74.0287382984814 45.6832250979267), Canada, Montréal, POINT(-73.5617 45.5089) +PLQ, Palanga Int'l, 8, mid, POINT(21.0974463986251 55.9713426235358), Lithuania, Klaipėda, POINT(21.1667 55.75) +JAI, Jaipur Int'l, 8, mid, POINT(75.8010104192668 26.8211798100605), India, Nawai, POINT(75.924 26.3824) +IXW, Sonari, 8, mid, POINT(86.1724662363776 22.8154145110242), India, Jamshedpur, POINT(86.1842 22.7925) +YEI, Yenisehir, 8, mid, POINT(29.54492 40.2555395007473), Turkey, İnegöl, POINT(29.5097 40.0806) +ADA, Şakirpaşa, 8, major, POINT(35.2969614268338 36.9852090641795), Turkey, Adana, POINT(35.3213 37.0) +ADQ, Kodiak, 8, mid, POINT(-152.485638515235 57.7485921070483), , , +AMA, Amarillo Int'l, 8, major, POINT(-101.705352772697 35.2184031919398), United States, Amarillo, POINT(-101.8316 35.1984) +ASP, Alice Springs, 8, mid, POINT(133.902918 -23.801968), Australia, Alice Springs, POINT(133.8667 -23.7) +ATQ, Raja Sansi Int'l, 8, [mid,military], POINT(74.8071559719824 31.7068220258888), 
Pakistan, Lahore, POINT(74.3436 31.5497) +BBI, Biju Patnaik, 8, mid, POINT(85.8168899544429 20.2526659754734), India, Bhubaneshwar, POINT(85.84 20.27) +BET, Bethel, 8, mid, POINT(-161.83898695944 60.7787379834088), , , +BGA, Palonegro, 8, mid, POINT(-73.1809207725361 7.12770915402685), Colombia, Bucaramanga, POINT(-73.0 7.1333) +BHM, Birmingham Int'l, 8, major, POINT(-86.7523773615462 33.5618672828058), United States, Hoover, POINT(-86.8068 33.3763) +BHQ, Broken Hill, 8, mid, POINT(141.470407303097 -31.998996737463), Australia, Broken Hill, POINT(141.4667 -31.95) +BIL, Logan Int'l, 8, major, POINT(-108.536929388125 45.8036855715278), United States, Billings, POINT(-108.5526 45.7891) +BIS, Bismarck Muni., 8, mid, POINT(-100.757471303717 46.7751066661614), United States, Bismarck, POINT(-100.7694 46.8143) +BJX, Del Bajio Int'l, 8, mid, POINT(-101.478753382467 20.9858871211938), Mexico, León de los Aldama, POINT(-101.6833 21.1167) +BNI, Benin, 8, mid, POINT(5.603682560067 6.31716689207818), Nigeria, Benin City, POINT(5.6222 6.3333) +BOJ, Bourgas, 8, major, POINT(27.5164093662953 42.5670835487702), Bulgaria, Burgas, POINT(27.4702 42.503) +BRE, Bremen, 8, major, POINT(8.7858617703132 53.052287104156), Germany, Bremen, POINT(8.8 53.0833) +BRM, Jacinto Lara Int'l, 8, mid, POINT(-69.356102 10.0453), Venezuela, Barquisimeto, POINT(-69.3467 10.0678) +BRO, Brownsville-South Padre Island Int'l, 8, mid, POINT(-97.431765340232 25.9062743545347), Mexico, Heroica Matamoros, POINT(-97.5042 25.8797) +BRS, Bristol Int'l, 8, major, POINT(-2.71086469134308 51.3862934189148), United Kingdom, Caerdydd, POINT(-3.1792 51.4817) +BTR, Baton Rouge Metro, 8, major, POINT(-91.1567544048105 30.5326138040586), United States, Baton Rouge, POINT(-91.1311 30.442) +BTS, Bratislava-M.R. 
Štefánik, 8, major, POINT(17.1999850022208 48.1698379062535), Slovakia, Bratislava, POINT(17.1097 48.1439) +BTV, Burlington Int'l, 8, mid, POINT(-73.1550787790668 44.4692066040732), United States, South Burlington, POINT(-73.2202 44.4622) +CAE, Columbia Metro, 8, major, POINT(-81.1093352429377 33.9342054584275), United States, West Columbia, POINT(-81.0936 33.9932) +CCJ, Calicut Int'l, 8, major, POINT(75.950993063051 11.1395520526064), India, Calicut, POINT(75.77 11.25) +CCK, Cocos (Keeling) Islands, 8, mid, POINT(96.8287472144207 -12.1851585953293), , , +CFU, Corfu Int'l (Ioannis Kapodistrias), 8, mid, POINT(19.9147561641662 39.6067554505259), Greece, Kérkyra, POINT(19.9214 39.6239) +CGQ, Changchun Longjia Int'l, 8, major, POINT(125.690456812998 43.993011479577), China, Changchun, POINT(125.2 43.9) +CHS, Charleston Int'l, 8, [major,military], POINT(-80.0369337438262 32.8845301562965), United States, North Charleston, POINT(-80.0722 32.9067) +CJB, Coimbatore, 8, mid, POINT(77.038893772262 11.0301415125983), India, Coimbatore, POINT(76.9558 11.0168) +CLJ, Someseni, 8, mid, POINT(23.6869812680486 46.7826626340243), Romania, Cluj-Napoca, POINT(23.5833 46.7667) +CMW, Ignacio Agramonte, 8, mid, POINT(-77.8451039935167 21.4247037281961), Cuba, Camagüey, POINT(-77.9075 21.3839) +CPR, Casper/Natrona County Int'l, 8, major, POINT(-106.464444809692 42.8971900483006), United States, Casper, POINT(-106.3208 42.842) +CRK, Clark Int'l, 8, major, POINT(120.550770223914 15.1876422423888), Philippines, Angeles City, POINT(120.5847 15.1472) +CRW, Yeager, 8, [major,military], POINT(-81.5964164667526 38.3705914372865), United States, South Charleston, POINT(-81.7155 38.3426) +CTA, Catania Fontanarossa, 8, major, POINT(15.0674605007053 37.470072800341), Italy, Catania, POINT(15.0903 37.5) +CTM, Chetumal Int'l, 8, mid, POINT(-88.3242600415365 18.506434233376), Mexico, Chetumal, POINT(-88.3053 18.5036) +CWL, Cardiff, 8, major, POINT(-3.33956876429118 51.3986220911017), United Kingdom, 
Caerdydd, POINT(-3.1792 51.4817) +CYB, Gerrard Smith, 8, mid, POINT(-79.879461638003 19.6898653962844), Cuba, Ciego de Ávila, POINT(-78.7631 21.8481) +CZM, Cozumel Int'l, 8, mid, POINT(-86.9304064070436 20.5115543771647), Mexico, Cozumel, POINT(-86.9493 20.5104) +DAY, James M. Cox Dayton Int'l, 8, major, POINT(-84.2204594238102 39.8990402865362), United States, Vandalia, POINT(-84.193 39.8791) +DBO, Dubbo, 8, mid, POINT(148.569717 -32.218681), Australia, Dubbo, POINT(148.6011 -32.2569) +DCA, Washington Nat'l, 8, major, POINT(-77.0433373925631 38.8537162012123), United States, Waldorf, POINT(-76.9194 38.6085) +DGO, Durango Int'l, 8, mid, POINT(-104.533846024964 24.1261948326182), Mexico, Colonia General Felipe Ángeles, POINT(-104.6 23.9167) +DNK, Voloskoye, 8, mid, POINT(35.0939060224975 48.3675718021117), Ukraine, Dnipro, POINT(35.04 48.4675) +DOK, Donetsk, 8, major, POINT(37.7448085572103 48.0691671285582), Ukraine, Makiivka, POINT(37.9611 48.0556) +DZO, Santa Bernardina Int'l, 8, mid, POINT(-56.4992636213744 -33.3591084475501), Uruguay, Durazno, POINT(-56.5167 -33.3667) +EDI, Edinburgh Int'l, 8, major, POINT(-3.36428468513554 55.9485540113639), United Kingdom, Dunfermline, POINT(-3.4393 56.0719) +EIS, Terrance B. 
Lettsome Int'l, 8, mid, POINT(-64.5371514365794 18.4443618557983), British Virgin Islands, Road Town, POINT(-64.6167 18.4167) +EKO, Elko Reg., 8, mid, POINT(-115.786479232249 40.8276058815225), United States, Elko, POINT(-115.7678 40.8381) +ESE, Ensenada, 8, mid, POINT(-116.595724400418 31.7977139760569), Mexico, Rodolfo Sánchez Taboada, POINT(-116.5911 31.7958) +FAE, Vágar, 8, mid, POINT(-7.2708 62.0625), Faroe Islands, Tórshavn, POINT(-6.7833 62.0) +FAR, Hector Int'l, 8, [mid,military], POINT(-96.8254561269675 46.9198178811323), United States, Fargo, POINT(-96.8292 46.8651) +FAT, Fresno Yosemite Int'l, 8, mid, POINT(-119.720001323576 36.7698128373959), United States, Hanford, POINT(-119.6549 36.3274) +FLG, Flagstaff Pulliam, 8, mid, POINT(-111.674656171675 35.1389116757976), United States, Flagstaff, POINT(-111.6194 35.1872) +FRS, Mundo Maya Int'l, 8, mid, POINT(-89.8778404226508 16.9149741642226), Guatemala, El Chal, POINT(-89.65 16.6333) +FSD, Sioux Falls Reg., 8, mid, POINT(-96.7313831017541 43.5801934972763), United States, Sioux Falls, POINT(-96.7311 43.5396) +GEG, Spokane Int'l, 8, major, POINT(-117.536836628585 47.6254781278368), United States, Spokane, POINT(-117.433 47.6671) +GGT, Exuma Int'l, 8, mid, POINT(-75.872613085304 23.5638829069259), The Bahamas, Nassau, POINT(-77.3386 25.0781) +GIB, Gibraltar, 8, mid, POINT(-5.34677180033388 36.1512747504173), Gibraltar, Gibraltar, POINT(-5.35 36.1333) +GRR, Gerald R. 
Ford Int'l, 8, mid, POINT(-85.529573207274 42.8847776020908), United States, Kentwood, POINT(-85.5926 42.8852) +GSO, Triad Int'l, 8, major, POINT(-79.9364867577484 36.1053781998932), United States, Greensboro, POINT(-79.8271 36.0956) +GTF, Great Falls Int'l, 8, mid, POINT(-111.35668472784 47.482270729263), United States, Bozeman, POINT(-111.0558 45.6833) +GZT, Gaziantep Oğuzeli Int'l, 8, major, POINT(37.47380325219 36.9453633446875), Turkey, Gaziantep, POINT(37.3792 37.0628) +HBX, Hubli, 8, mid, POINT(75.0863155680281 15.3591833386229), India, Saundatti, POINT(75.1167 15.7833) +HDY, Hat Yai Int'l, 8, mid, POINT(100.393751274671 6.93634231940664), Thailand, Hat Yai, POINT(100.4667 7.0167) +HFE, Hefei-Luogang, 8, mid, POINT(117.304197015888 31.7798576795778), China, Hefei, POINT(117.2808 31.8639) +HRG, Hurghada Int'l, 8, major, POINT(33.8071606414118 27.1804260918186), Egypt, Al Ghardaqah, POINT(33.8117 27.2578) +HRK, Kharkov Int'l, 8, major, POINT(36.2822010773503 49.9215360631551), Ukraine, Kharkiv, POINT(36.2311 49.9925) +HSV, Huntsville Int'l, 8, major, POINT(-86.7749430563373 34.6483344609319), United States, Hartselle, POINT(-86.9396 34.4391) +IBA, Ibadan, 8, mid, POINT(3.9738133433229 7.36034397269393), Nigeria, Ibadan, POINT(3.9167 7.3964) +ICT, Kansas City Int'l, 8, major, POINT(-97.4287387683976 37.6529279603903), United States, Wichita, POINT(-97.3443 37.6895) +ILM, Wilmington Int'l, 8, mid, POINT(-77.9103756560469 34.2667840671996), United States, Murraysville, POINT(-77.8429 34.2919) +ILR, Ilorin Int'l, 8, mid, POINT(4.49484038819934 8.43537651935241), Nigeria, Ilorin, POINT(4.55 8.5) +INL, Falls Int'l, 8, mid, POINT(-93.3980027552794 48.5659930848414), United States, Hibbing, POINT(-92.9486 47.398) +INV, Inverness, 8, mid, POINT(-4.06359175587141 57.5395002923424), United Kingdom, Nairn, POINT(-3.869 57.586) +IPL, Imperial Cty., 8, mid, POINT(-115.57199556658 32.8339586685524), Mexico, Mexicali, POINT(-115.4678 32.6633) +IXJ, Jammu, 8, mid, 
POINT(74.8423077638915 32.6810428886225), India, Jammu, POINT(74.87 32.73) +IXM, Madurai, 8, mid, POINT(78.0911394937194 9.83718627877566), India, Madurai, POINT(78.1198 9.9252) +JDH, Jodhpur, 8, [major,military], POINT(73.0505491895671 26.2637623458351), India, Jodhpur, POINT(73.02 26.28) +JLR, Jabalpur, 8, mid, POINT(80.0587438885277 23.1845328746465), India, Jabalpur, POINT(79.9333 23.1667) +JRO, Kilimanjaro Int'l, 8, mid, POINT(37.0651896067748 -3.42444495998178), Tanzania, Arusha, POINT(36.6833 -3.3667) +KAD, Kaduna, 8, mid, POINT(7.32525347407434 10.6946192862391), Nigeria, Kaduna, POINT(7.4333 10.5167) +KGA, Kananga, 8, mid, POINT(22.4783332482689 -5.90016656227041), Congo (Kinshasa), Kananga, POINT(22.4488 -5.897) +KMS, Kumasi, 8, mid, POINT(-1.59257526582361 6.71460638750365), Ghana, Kumasi, POINT(-1.6167 6.6667) +KNA, Viña del Mar, 8, mid, POINT(-71.4806025354969 -32.948391765136), Chile, Viña del Mar, POINT(-71.5517 -33.0244) +KNU, Kanpur, 8, mid, POINT(80.3675338772002 26.4388334467042), India, Cawnpore, POINT(80.3319 26.4499) +KOA, Kona Int'l at Keahole, 8, mid, POINT(-156.040889471058 19.7370991399442), United States, Waimea, POINT(-155.6381 20.0124) +KOI, Kirkwall, 8, mid, POINT(-2.90137849524508 58.9544288788303), United Kingdom, Kirkwall, POINT(-2.96 58.981) +KTU, Kota, 8, mid, POINT(75.8504977944552 25.1634187166743), India, Būndi, POINT(75.6372 25.4383) +KYA, Konya, 8, [major,military], POINT(32.5756732669687 37.9839945531695), Turkey, Konya, POINT(32.4833 37.8667) +LEX, Blue Grass, 8, major, POINT(-84.5982681918786 38.0374273181372), United States, Nicholasville, POINT(-84.5668 37.8906) +LIH, Lihue, 8, mid, POINT(-159.349084290522 21.9781243162088), United States, Kapaa, POINT(-159.3521 22.091) +LIT, Clinton National, 8, major, POINT(-92.2205881319289 34.7284300415179), United States, Little Rock, POINT(-92.3577 34.7256) +LMM, Los Mochis, 8, mid, POINT(-109.082694645261 25.688508826099), Mexico, Los Mochis, POINT(-108.9937 25.7835) +LOV, 
Venustiano Carranza Int'l, 8, mid, POINT(-101.464960031751 26.9553927160699), Mexico, Monclova, POINT(-101.4222 26.9103) +LRD, Laredo Int'l, 8, mid, POINT(-99.4556603976513 27.5436657175825), Mexico, Nuevo Laredo, POINT(-99.5069 27.4861) +LSI, Sumburgh, 8, mid, POINT(-1.28806068838753 59.8766899598999), United Kingdom, Lerwick, POINT(-1.145 60.155) +LTK, Bassel Al-Assad Int'l, 8, major, POINT(35.9442407096663 35.4073114596744), Syria, Latakia, POINT(35.7833 35.5167) +LTN, London Luton, 8, major, POINT(-0.376227267397439 51.8802952570969), United Kingdom, Luton, POINT(-0.4147 51.8783) +LYR, Svalbard Longyear, 8, mid, POINT(15.495229 78.246717), Svalbard, Longyearbyen, POINT(15.6333 78.2167) +MBJ, Sangster Int'l, 8, mid, POINT(-77.9183907635752 18.5011549298249), Jamaica, Montego Bay, POINT(-77.9167 18.4667) +MDL, Mandalay Int'l, 8, mid, POINT(95.9706535950217 21.7055490680274), Myanmar, Mandalay, POINT(96.0844 21.9831) +MDW, Chicago Midway Int'l, 8, major, POINT(-87.7421266885612 41.7883492597409), United States, Chicago, POINT(-87.6866 41.8375) +MEC, Eloy Alfaro Int'l, 8, [mid,military], POINT(-80.6833845995774 -0.949557002112883),Ecuador, Portoviejo, POINT(-80.4553 -1.0561) +MGM, Montgomery Reg., 8, major, POINT(-86.3903074602686 32.3045879909631), United States, Prattville, POINT(-86.4573 32.4597) +MHT, Manchester-Boston Reg., 8, major, POINT(-71.4375239091857 42.9279139945886), United States, Nashua, POINT(-71.491 42.7491) +DNMA, Maiduguri Int'l, 8, mid, POINT(13.0851390162471 11.8534713188527), Nigeria, Maiduguri, POINT(13.15 11.8333) +MJM, Mbuji Mayi, 8, mid, POINT(23.5721091989052 -6.12484541348812), Congo (Kinshasa), Mbuji-Mayi, POINT(23.6 -6.15) +MOT, Minot Int'l, 8, mid, POINT(-101.2913855313 48.2556049212839), United States, Minot, POINT(-101.278 48.2375) +MSO, Missoula Int'l, 8, mid, POINT(-114.083694923651 46.9187604768831), United States, Missoula, POINT(-114.0214 46.8751) +MXL, Gen R.S. 
Taboada Int'l, 8, mid, POINT(-115.247874047841 32.6285643324607), Mexico, Mexicali, POINT(-115.4678 32.6633) +MXP, Malpensa, 8, major, POINT(8.71295953502437 45.6274405140381), Italy, Gallarate, POINT(8.7914 45.6649) +NLK, Norfolk Island, 8, mid, POINT(167.943394116205 -29.0351592555275), , , +NUE, Nurnberg, 8, major, POINT(11.0774179739096 49.4945052170345), Germany, Nuremberg, POINT(11.0775 49.4539) +ODS, Odessa Int'l, 8, major, POINT(30.6768308310206 46.4406268759106), Ukraine, Odesa, POINT(30.7326 46.4775) +OOL, Gold Coast, 8, mid, POINT(153.512876264303 -28.1665168540202), Australia, Gold Coast, POINT(153.4 -28.0167) +ORN, Oran Es Senia, 8, mid, POINT(-0.60679696443112 35.6202747312734), Algeria, Oran, POINT(-0.6331 35.6969) +PAT, Lok Nayak Jaiprakash, 8, mid, POINT(85.0909021314663 25.5944434295605), India, Patna, POINT(85.1 25.6) +PDU, Paysandu, 8, mid, POINT(-58.0685346825257 -32.3614545292723), Uruguay, Paysandú, POINT(-58.0756 -32.3214) +PFO, Paphos Int'l, 8, major, POINT(32.4832322064926 34.7134012817335), Cyprus, Paphos, POINT(32.4167 34.7667) +PLM, Sultan Mahmud Badaruddin II, 8, mid, POINT(104.699128326762 -2.89999345005997), Indonesia, Palembang, POINT(104.7556 -2.9861) +PTG, Polokwane Int'l, 8, mid, POINT(29.4533403645644 -23.858986270166), South Africa, Polokwane, POINT(29.45 -23.9) +PUJ, Punta Cana, 8, mid, POINT(-68.3632351074649 18.563039033987), Dominican Republic, Pantanal, POINT(-68.3667 18.5333) +QRO, Queretaro Int'l, 8, mid, POINT(-100.18735943003 20.622466071278), Mexico, Querétaro, POINT(-100.3928 20.5875) +RAJ, Rajkot, 8, mid, POINT(70.7799548311565 22.3092816988361), India, Rājkot, POINT(70.7833 22.3) +RIC, Richmond Int'l, 8, major, POINT(-77.333119638113 37.5082899750901), United States, Highland Springs, POINT(-77.3285 37.5516) +RJH, Shah Makhdum, 8, mid, POINT(88.6138045704431 24.4448068623035), Bangladesh, Rājshāhi, POINT(88.6 24.3667) +ROC, Greater Rochester Int'l, 8, major, POINT(-77.6652445062197 43.1275519826482), United States, 
Greece, POINT(-77.6988 43.246) +ROK, Rockhampton, 8, mid, POINT(150.478897 -23.378599), Australia, Gracemere, POINT(150.4558 -23.4391) +ROV, Rostov-on-Don, 8, mid, POINT(39.8035144445391 47.2551119519754), Russia, Bataysk, POINT(39.7333 47.1667) +RTW, Saratov, 8, mid, POINT(46.035023249891 51.5606456508842), Russia, Saratov, POINT(46.0167 51.5333) +SAP, Ramón Villeda Morales Int'l, 8, mid, POINT(-87.9272365125409 15.4558630524883), Honduras, San Pedro Sula, POINT(-88.0333 15.5) +SBA, Santa Barbara Muni., 8, mid, POINT(-119.8366015808 34.4257312978783), United States, Goleta, POINT(-119.8594 34.4361) +SCC, Deadhorse, 8, mid, POINT(-148.457855 70.19751), , , +SFJ, Kangerlussuaq, 8, mid, POINT(-50.694199 67.018097), Greenland, Ilulissat, POINT(-51.1 69.2167) +SGF, Springfield Reg., 8, major, POINT(-93.3826379012003 37.2421444903024), United States, Republic, POINT(-93.4446 37.1452) +SHV, Shreveport Reg., 8, major, POINT(-93.8285222229503 32.4545798866513), United States, Shreveport, POINT(-93.7955 32.4653) +SIP, Simferopol Int'l, 8, major, POINT(33.9960529244537 45.0202173978165), Ukraine, Hvardiiske, POINT(34.0142 45.1142) +SIT, Sitka Rocky Gutierrez, 8, mid, POINT(-135.365692 57.05349), United States, Sitka, POINT(-135.3152 57.2401) +SJD, Los Cabos Int'l, 8, major, POINT(-109.717858386909 23.1626574483597), Mexico, San José del Cabo, POINT(-109.7081 23.0614) +SLE, McNary Field, 8, major, POINT(-123.007871479404 44.9105138452142), United States, Keizer, POINT(-123.0243 45.0028) +SLW, Plan de Guadalupe, 8, mid, POINT(-100.932260548587 25.5479976419974), Mexico, Saltillo, POINT(-100.9919 25.4231) +SNN, Shannon, 8, major, POINT(-8.92242885557686 52.6934537102532), Ireland, Shannon, POINT(-8.8686 52.7137) +SON, Santo Pekoa Int'l, 8, mid, POINT(167.220894919375 -15.5055387370858), Vanuatu, Luganville, POINT(167.1667 -15.5333) +SRG, Achmad Yani, 8, mid, POINT(110.378556255666 -6.97873484956982), Indonesia, Semarang, POINT(110.4167 -6.9667) +SXR, Srinagar, 8, 
[military,mid], POINT(74.7826243672311 33.9830909431623), India, Bāramūla, POINT(74.34 34.2) +TAP, Tapachula Int'l, 8, mid, POINT(-92.370003 14.7911281338773), Mexico, Tapachula, POINT(-92.2667 14.9) +TGD, Podgorica, 8, major, POINT(19.2466868618873 42.3679335195428), Montenegro, Podgorica, POINT(19.2629 42.4413) +TLH, Tallahassee Reg., 8, major, POINT(-84.3449953984858 30.3955576176938), United States, Tallahassee, POINT(-84.2527 30.4551) +TRN, Turin Int'l, 8, major, POINT(7.64416230362133 45.1916600734642), Italy, Turin, POINT(7.6761 45.0792) +TYN, Taiyuan Wusu Int'l, 8, major, POINT(112.625891539315 37.7545117791512), China, Taiyuan, POINT(112.5425 37.8733) +UAK, Narsarsuaq, 8, mid, POINT(-45.4164008923108 61.1625968337328), Greenland, Narsarsuaq, POINT(-45.4347 61.1458) +UTP, U-Tapao, 8, [military,mid], POINT(101.00020929048 12.6852930912664), Thailand, Phatthaya, POINT(100.889 12.9357) +VFA, Victoria Falls, 8, mid, POINT(25.8467677208826 -18.0990155983682), Zambia, Livingstone, POINT(25.8667 -17.85) +VGA, Vijaywada, 8, mid, POINT(80.7973080000675 16.528642778235), India, Vijayavāda, POINT(80.6305 16.5193) +VNS, Varanasi, 8, mid, POINT(82.8538741913527 25.4499077329822), India, Vārānasi, POINT(83.0128 25.3189) +VRA, Juan Gualberto Gomez, 8, major, POINT(-81.4367103850623 23.0395422339631), Cuba, Cárdenas, POINT(-81.2036 23.0428) +VSA, Villahermosa, 8, mid, POINT(-92.8190675836262 17.9930660113111), Mexico, Macuspana, POINT(-92.6 17.7667) +YBR, Brandon, 8, mid, POINT(-99.9458959002463 49.9047279410277), United States, Minot, POINT(-101.278 48.2375) +YED, CFB Edmonton, 8, [military,major], POINT(-113.478839054497 53.6749156618668), Canada, St. Albert, POINT(-113.6258 53.6303) +YFB, Iqaluit, 8, mid, POINT(-68.5367292441812 63.7511523537807), Canada, Iqaluit, POINT(-68.5107 63.7598) +YHM, John C. 
Munro Hamilton Int'l, 8, mid, POINT(-79.9264230959967 43.1633605305096), Canada, Brantford, POINT(-80.25 43.1667) +YMM, Fort McMurray, 8, mid, POINT(-111.223840046617 56.6563171390962), Canada, Wood Buffalo, POINT(-111.3284 57.6042) +YNT, Yantai, 8, [major,military], POINT(121.372047417773 37.4077044726924), China, Yantai, POINT(121.2664 37.3997) +YPE, Peace River, 8, mid, POINT(-117.443663208082 56.231924036745), Canada, Grande Prairie, POINT(-118.7947 55.1708) +YQM, Greater Moncton Int'l, 8, mid, POINT(-64.6886696807361 46.1162059639259), Canada, Moncton, POINT(-64.7714 46.1328) +YQY, Sydney/J.A. Douglas McCurdy, 8, mid, POINT(-60.0469372117026 46.1673405890504), Canada, Cape Breton, POINT(-60.1931 46.1389) +YRB, Resolute Bay, 8, mid, POINT(-94.9708023244006 74.7181860987594), , , +YSM, Fort Smith, 8, mid, POINT(-111.961059938158 60.0198749602443), Canada, Wood Buffalo, POINT(-111.3284 57.6042) +YTH, Thompson, 8, mid, POINT(-97.860733 55.797482), , , +YTS, Timmins, 8, mid, POINT(-81.372047 48.566158), Canada, Timmins, POINT(-81.3333 48.4667) +YUT, Repulse Bay, 8, mid, POINT(-86.25 66.533302), , , +YVP, Kuujjuaq, 8, mid, POINT(-68.433342 58.101959), , , +YWK, Wabush, 8, mid, POINT(-66.873009 52.926071), , , +YXD, Edmonton City Centre, 8, mid, POINT(-113.522973688581 53.5709436582812), Canada, St. Albert, POINT(-113.6258 53.6303) +YXJ, Fort St. John (N. Peace), 8, mid, POINT(-120.736439 56.246035), Canada, Fort St. 
John, POINT(-120.8476 56.2465) +YYB, North Bay/Jack Garland, 8, mid, POINT(-79.42491 46.358711), Canada, North Bay, POINT(-79.45 46.3) +ZAR, Zaria, 8, mid, POINT(7.68726764310577 11.1352958601071), Nigeria, Zaria, POINT(7.7 11.0667) +SKP, Skopje, 8, mid, POINT(21.6281971858229 41.9564546081544), Macedonia, Skopje, POINT(21.4317 41.9961) +VE23, Burnpur, 8, mid, POINT(86.974546776573 23.6312179107764), India, Āsansol, POINT(86.99 23.68) +VIDX, Hindon Air Force Station, 8, mid, POINT(77.3507888779117 28.7077968601071), India, Ghāziābād, POINT(77.42 28.67) +, Sunchon, 8, major, POINT(125.890825057486 39.4119659710565), North Korea, Sunch’ŏn, POINT(125.9333 39.4167) +EPLL, Łódź Władysław Reymont, 8, mid, POINT(19.4032148744037 51.72720704517), Poland, Łódź, POINT(19.4547 51.7769) +BXJ, Alma Ata N.W., 8, [mid,military], POINT(76.8782640096648 43.3554190837919), Kazakhstan, Almaty, POINT(76.8958 43.2775) +JMU, Jiamusi Dongjiao, 8, mid, POINT(130.456204704407 46.8430150223379), China, Jiamusi, POINT(130.3653 46.8081) +MDG, Mudanjiang Hailang, 8, major, POINT(129.58015153222 44.5342936299935), China, Mudanjiang, POINT(129.5997 44.5861) +ULMM, Severomorsk-3 (Murmansk N.E.), 8, [military,major], POINT(33.2903527616285 69.0168711826804), Russia, Murmansk, POINT(33.075 68.9706) +OSB, Mosul Int'l, 8, mid, POINT(43.145802 36.308601), Iraq, Mosul, POINT(43.13 36.34) +, Rostov N., 8, [military,mid], POINT(39.6353996343665 47.2774209202867), Russia, Bataysk, POINT(39.7333 47.1667) +, Rostov S.W., 8, mid, POINT(39.7972215345149 47.1158577255835), Russia, Bataysk, POINT(39.7333 47.1667) +OUL, Oulu, 8, mid, POINT(25.3728374704307 64.9287992358849), Finland, Oulu, POINT(25.4719 65.0142) +BOD, Bordeaux, 8, major, POINT(-0.701793449075243 44.8321108662674), France, Bordeaux, POINT(-0.58 44.84) +CEQ, Mandelieu, 8, mid, POINT(6.95431612028937 43.546097987045), France, Mandelieu-la-Napoule, POINT(6.9381 43.5464) +DOL, St Gatien, 8, mid, POINT(0.158653528230218 49.3616609986609), France, Le 
Havre, POINT(0.1 49.49) +LIL, Lille-Lesquin, 8, mid, POINT(3.10596499799813 50.5716423929581), France, Roubaix, POINT(3.1817 50.6901) +TLS, Toulouse-Blagnac, 8, major, POINT(1.37350918551153 43.6304625661601), France, Toulouse, POINT(1.444 43.6045) +FUK, Fukuoka, 8, major, POINT(130.444189541884 33.5848164332573), Japan, Fukuoka, POINT(130.4 33.5833) +HIW, Hiroshima-Nishi, 8, mid, POINT(132.419372741681 34.3713815628829), Japan, Hiroshima, POINT(132.4519 34.3914) +NKM, Nagoya, 8, mid, POINT(136.91962838414 35.2540532052867), Japan, Nagoya, POINT(136.9 35.1833) +SDJ, Sendai, 8, mid, POINT(140.930247381369 38.1382075615287), Japan, Sendai, POINT(140.8694 38.2682) +KKN, Kirkenes Hoybuktmoen, 8, mid, POINT(29.8913489500406 69.7238318113692), Russia, Nikel, POINT(30.2206 69.4081) +CGB, Marechal Rondon Int'l, 8, mid, POINT(-56.1201774754724 -15.6511470191955), Brazil, Várzea Grande, POINT(-56.1333 -15.65) +FLN, Hercilio Luz Int'l, 8, major, POINT(-48.5448122049599 -27.6646276941638), Brazil, Florianópolis, POINT(-48.4853 -27.6122) +JOI, Joinville-Lauro C. 
de Loyola, 8, mid, POINT(-48.8016498165616 -26.2242941374785), Brazil, Joinvile, POINT(-48.8437 -26.3204) +JPA, Presidente Castro Pinto Int'l, 8, mid, POINT(-34.9488925911125 -7.14617462402047), Brazil, João Pessoa, POINT(-34.88 -7.12) +NAT, Augusto Severo Int'l, 8, major, POINT(-35.2488410165389 -5.89912054477116), Brazil, Natal, POINT(-35.2 -5.7833) +OPO, Francisco Sa Carneiro, 8, major, POINT(-8.67127240719647 41.2368708920452), Portugal, Matosinhos, POINT(-8.7 41.1833) +SLZ, Marechal Cunha Machado Int'l, 8, mid, POINT(-44.2362344700492 -2.58350921043019), Brazil, São Luís, POINT(-44.3044 -2.5283) +SSZ, Santos Air Force Base, 8, [military,mid], POINT(-46.3052704931003 -23.9237590410637), Brazil, Santos, POINT(-46.325 -23.9369) +THE, Teresina-Senador Petronio Portella, 8, mid, POINT(-42.8212402317845 -5.06346299167191), Brazil, Teresina, POINT(-42.8042 -5.0949) +VCP, Viracopos-Campinas Int'l, 8, mid, POINT(-47.1410791911014 -23.0096239085339), Brazil, Campinas, POINT(-47.0608 -22.9058) +VIX, Eurico de Aguiar Salles, 8, mid, POINT(-40.2885368759913 -20.2574162759418), Brazil, Vitória, POINT(-40.3083 -20.2889) +ALC, Alicante, 8, major, POINT(-0.557230440363588 38.2866408993929), Spain, Alicante, POINT(-0.4831 38.3453) +LEI, Almeria, 8, mid, POINT(-2.3716014405912 36.8477672709643), Spain, Almería, POINT(-2.4681 36.8403) +VLC, Valencia, 8, mid, POINT(-0.473474930771676 39.4914597884489), Spain, Paterna, POINT(-0.4406 39.5028) +KRN, Kiruna_Airport, 8, mid, POINT(20.3351522954898 67.8256066056432), Sweden, Kiruna, POINT(20.3028 67.8489) +NRK, Norrköping Airport, 8, major, POINT(16.2339407695814 58.5833805017541), Sweden, Norrköping, POINT(16.2 58.6) +BDO, Husein Sastranegara Int'l, 8, mid, POINT(107.575611852209 -6.90042408353409), Indonesia, Cimahi, POINT(107.5548 -6.8712) +ROS, Rosario – Islas Malvinas Int'l, 8, mid, POINT(-60.7800787216586 -32.9162269743812), Argentina, Funes, POINT(-60.8167 -32.9167) +MCZ, Maceio/Zumbi dos Palmares Int'l, 8, mid, 
POINT(-35.7924951215833 -9.51494118540116), Brazil, Maceió, POINT(-35.735 -9.6658) +SSH, Sharm el-Sheikh Int'l, 8, mid, POINT(34.3901189267288 27.9804044199168), Egypt, Sharm ash Shaykh, POINT(34.3297 27.9122) +TCP, Taba Int'l, 8, mid, POINT(34.7758378996779 29.5944990568019), Jordan, Al ‘Aqabah, POINT(35.0056 29.5319) +AGR, Agra, 8, [major,military], POINT(77.960909176509 27.15772773475), India, Agwār, POINT(78.02 27.18) +BDQ, Vadodara, 8, mid, POINT(73.2262889533239 22.3361640021171), India, Vadodara, POINT(73.2 22.3) +KSH, Shahid Ashrafi Esfahani, 8, mid, POINT(47.1565835165639 34.3464167739108), Iran, Kermānshāh, POINT(47.065 34.3142) +BEN, Benina Int'l, 8, mid, POINT(20.2680398018516 32.0872774606553), Libya, Benghazi, POINT(20.0667 32.1167) +DHA, King Abdulaziz AB, 8, [military,major], POINT(50.1477245727844 26.2703680854768), Saudi Arabia, Dhahran, POINT(50.15 26.2667) +STY, Nueva Hespérides Int'l, 8, mid, POINT(-57.9840821176492 -31.4373883387798), Argentina, Federación, POINT(-57.9167 -30.9833) +BAIK, Baikonur Cosmodrome, 8, spaceport, POINT(63.307354423875 45.9635739403124), Kazakhstan, Baikonur, POINT(63.3167 45.6167) +KSC, Kennedy Space Center, 8, spaceport, POINT(-80.6369680911892 28.5163704772027), United States, Titusville, POINT(-80.8193 28.5727) +CSG, Centre Spatial Guyanais, 8, spaceport, POINT(-52.7684296893452 5.23941001258035), French Guiana, Kourou, POINT(-52.6499 5.16) +AUA, Queen Beatrix Int'l, 7, mid, POINT(-70.0076228563496 12.5034643630297), Aruba, Tanki Leendert, POINT(-70.022 12.5418) +JIB, Djibouti-Ambouli Int'l, 7, mid, POINT(43.1497127859956 11.5521018230172), Djibouti, Djibouti, POINT(43.145 11.5883) +IQQ, Diego Aracena Int'l, 7, [mid,military], POINT(-70.178635395533 -20.5478400878309), Chile, Iquique, POINT(-70.15 -20.2167) +SAW, Sabiha Gökçen Havaalani, 7, major, POINT(29.3095991423889 40.9043003553957), Turkey, Istanbul, POINT(28.955 41.0136) +KSA, Kosrae Island, 7, mid, POINT(162.957041225076 5.3520098571828), Federated States 
of Micronesia, Tofol, POINT(163.0086 5.3258) +FUN, Funafuti Int'l, 7, mid, POINT(179.19544202302 -8.52485415059424), Tuvalu, Funafuti, POINT(179.2 -8.5167) +NAG, Dr. Babasaheb Ambedkar Int'l, 7, mid, POINT(79.0537976421986 21.0899317630087), India, Nāgpur, POINT(79.0806 21.1497) +HKT, Phuket Int'l, 7, mid, POINT(98.3060384900559 8.10768475952735), Thailand, Phuket, POINT(98.3975 7.8881) +NAN, Nadi Int'l, 7, mid, POINT(177.451151198059 -17.7529129479792), Fiji, Nadi, POINT(177.4167 -17.8) +AGU, Lic. Jesús Terán Peredo Int'l, 7, mid, POINT(-102.314093740058 21.7013390329207), Mexico, Aguascalientes, POINT(-102.296 21.876) +ALL, Albenga, 7, mid, POINT(8.12314535436409 44.0458773598158), Italy, Albenga, POINT(8.2167 44.05) +AMM, Queen Alia Int'l, 7, major, POINT(35.989707162193 31.7226621600432), Jordan, Amman, POINT(35.9328 31.9497) +ARI, Chacalluta Int'l, 7, mid, POINT(-70.3357301410959 -18.3492061639579), Chile, Arica, POINT(-70.3167 -18.4667) +ATR, Atar Int'l, 7, mid, POINT(-13.0511704323315 20.4982706101565), Mauritania, Atar, POINT(-13.05 20.5167) +BAQ, Ernesto Cortissoz Int'l, 7, mid, POINT(-74.776555978265 10.8866775959414), Colombia, Barranquilla, POINT(-74.8019 10.9833) +BRC, Teniente Luis Candelaria Int'l, 7, mid, POINT(-71.1614300869763 -41.1459976958105), Argentina, San Carlos de Bariloche, POINT(-71.3 -41.15) +BYK, Bouaké, 7, mid, POINT(-5.06894222275311 7.73610495555032), Côte d'Ivoire, Bouaké, POINT(-5.0167 7.6833) +BZE, Philip S. W. 
Goldson Int'l, 7, major, POINT(-88.3082064033075 17.5360686575521), Belize, Belize City, POINT(-88.1886 17.4986) +CRP, Corpus Christi Int'l, 7, major, POINT(-97.5022678710298 27.7744560700823), United States, Corpus Christi, POINT(-97.3767 27.7254) +CUR, Hato Int'l, 7, mid, POINT(-68.9568788072761 12.1848346052019), Curaçao, Willemstad, POINT(-68.935 12.108) +CUZ, Velazco Astete Int'l, 7, major, POINT(-71.9436641449722 -13.5382186992639), Peru, Cusco, POINT(-71.9722 -13.525) +DAR, Julius Nyerere Int'l, 7, mid, POINT(39.2074715039165 -6.86672004249119), Tanzania, Dar es Salaam, POINT(39.2803 -6.8161) +DET, Detroit City, 7, mid, POINT(-83.0039681417733 42.4090938431907), United States, Detroit, POINT(-83.1024 42.3834) +DIL, Presidente Nicolau Lobato Int'l, 7, mid, POINT(125.524854209182 -8.54931157414564), Timor-Leste, Dili, POINT(125.5783 -8.5536) +DME, Moscow Domodedovo Int'l, 7, major, POINT(37.9002531289452 55.4141528223023), Russia, Balashikha, POINT(37.9667 55.8167) +DUD, Dunedin Int'l, 7, mid, POINT(170.200027 -45.923431), New Zealand, Mosgiel, POINT(170.3486 -45.875) +DZA, Dzaoudzi Pamanzi Int'l, 7, mid, POINT(45.2817864197899 -12.8049474381643), Mayotte, Mamoudzou, POINT(45.2272 -12.7794) +ELP, El Paso Int'l, 7, mid, POINT(-106.395714679366 31.7990860272589), Mexico, Juárez, POINT(-106.487 31.7386) +EVN, Zvartnots Int'l, 7, major, POINT(44.4000630536938 40.1523679451884), Armenia, Yerevan, POINT(44.5144 40.1814) +FTW, Fort Worth Meacham Field, 7, major, POINT(-97.3551348561587 32.8207529047972), United States, Fort Worth, POINT(-97.3474 32.7817) +GDT, JAGS McCartney Int'l, 7, mid, POINT(-71.1461337448876 21.4421237439063), Turks and Caicos Islands, Grand Turk, POINT(-71.136 21.4664) +GLS, Scholes Int'l, 7, mid, POINT(-94.8554013876264 29.2671239212096), United States, Galveston, POINT(-94.8913 29.2484) +GOM, Goma Int'l, 7, mid, POINT(29.2400534952228 -1.6583179500207), Congo (Kinshasa), Goma, POINT(29.2336 -1.6794) +GOU, Garoua Int'l, 7, mid, 
POINT(13.3724309377878 9.33068867678854), Cameroon, Garoua, POINT(13.4 9.3) +GUM, Antonio B. Won Pat Int'l, 7, major, POINT(144.805850357093 13.4926462359465), Guam, Hagåtña, POINT(144.7504 13.4745) +GYY, Gary/Chicago Int'l, 7, mid, POINT(-87.4083596247406 41.6177930015166), United States, Chicago, POINT(-87.6866 41.8375) +HAH, Prince Said Ibrahim Int'l, 7, mid, POINT(43.2745612179616 -11.5366393829127), Comoros, Moroni, POINT(43.256 -11.699) +HBA, Hobart Int'l, 7, mid, POINT(147.505996190408 -42.8376083694822), Australia, Kingston, POINT(147.3083 -42.9769) +HIR, Honiara Int'l, 7, mid, POINT(160.045855129925 -9.42757566400146), Solomon Islands, Honiara, POINT(159.9556 -9.4319) +IEV, Kiev Zhuliany Int'l, 7, mid, POINT(30.4451305182104 50.412808165985), Ukraine, Vyshneve, POINT(30.3581 50.3869) +IKT, Irkutsk S.E., 7, [mid,military], POINT(104.355859748002 52.2728893882244), Russia, Irkutsk, POINT(104.2833 52.2833) +IND, Indianapolis Int'l, 7, major, POINT(-86.2734003650885 39.7302043703969), United States, Indianapolis, POINT(-86.1458 39.7771) +INU, Nauru Int'l, 7, mid, POINT(166.91613965882 -0.545037226856384), Nauru, Yaren, POINT(166.9209 -0.5477) +IPC, Mataveri Int'l, 7, mid, POINT(-109.43006441001 -27.1587738388538), , , +JUJ, Gob. 
Horacio Guzman Int'l, 7, mid, POINT(-65.0937665458812 -24.3861010775846), Argentina, San Salvador de Jujuy, POINT(-65.3 -24.1833) +KHN, Nanchang Changbei Int'l, 7, mid, POINT(115.911979918602 28.8624891200666), China, Nanchang, POINT(115.8872 28.6842) +KMG, Kunming Wujiaba Int'l, 7, major, POINT(102.742117578823 24.999996110081), China, Kunming, POINT(102.7061 25.0433) +LBA, Leeds Bradford, 7, major, POINT(-1.65983106734746 53.8690819474434), United Kingdom, Bradford, POINT(-1.75 53.8) +LBV, Libreville Leon M'ba Int'l, 7, mid, POINT(9.41022337820712 0.457139229503759), Gabon, Libreville, POINT(9.4542 0.3903) +LFW, Lomé Tokoin, 7, mid, POINT(1.25093205640014 6.16687362722297), Togo, Lomé, POINT(1.2228 6.1319) +LWO, Lviv Danylo Halytskyi Int'l, 7, [mid,military], POINT(23.9461269598944 49.8178506050005), Ukraine, Lviv, POINT(24.0322 49.8425) +MAJ, Marshall Islands Int'l, 7, mid, POINT(171.281919370648 7.06811848557091), Marshall Islands, Majuro, POINT(171.3833 7.0833) +MFM, Macau Int'l, 7, major, POINT(113.57451294862 22.1576572529634), China, Zhuhai, POINT(113.5678 22.2769) +MGQ, Aden Adde Int'l, 7, mid, POINT(45.3036374186202 2.01635311214988), Somalia, Mogadishu, POINT(45.3419 2.0392) +MPM, Maputo Int'l, 7, mid, POINT(32.5741915194782 -25.924276711787), Mozambique, Maputo, POINT(32.5833 -25.9667) +MRU, Sir Seewoosagur Ramgoolam Int'l, 7, mid, POINT(57.6769860076636 -20.4317567793216), Mauritius, Curepipe, POINT(57.5263 -20.3188) +NAP, Naples Int'l, 7, major, POINT(14.2828444340203 40.8780728843639), Italy, Casoria, POINT(14.3 40.9) +NDB, Nouadhibou Int'l, 7, mid, POINT(-17.0334398691538 20.9290523064387), Mauritania, Nouadhibou, POINT(-17.0333 20.9333) +NGB, Ningbo Lishe Int'l, 7, major, POINT(121.461819388484 29.8208231906861), China, Ningbo, POINT(121.5492 29.875) +NKC, Nouakchott Int'l, 7, mid, POINT(-15.9519259252201 18.0979231718174), Mauritania, Nouakchott, POINT(-15.9785 18.0858) +NOU, La Tontouta Int'l, 7, mid, POINT(166.217232118699 -22.0136386248981), 
New Caledonia, Nouméa, POINT(166.458 -22.2758) +OAK, Oakland Int'l, 7, major, POINT(-122.213261257863 37.7123036951691), United States, San Leandro, POINT(-122.1599 37.7074) +ONT, Ontario Int'l, 7, major, POINT(-117.592327651651 34.060191102066), United States, Rancho Cucamonga, POINT(-117.5667 34.1247) +ORK, Cork, 7, major, POINT(-8.49014199983817 51.8485405419923), Ireland, Cork, POINT(-8.47 51.8972) +PDG, Minangkabau Int'l, 7, mid, POINT(100.285455851791 -0.786045714026273), Indonesia, Padang, POINT(100.3531 -0.95) +PDL, João Paulo II, 7, mid, POINT(-25.6969882198711 37.7433316472933), Portugal, Ponta Delgada, POINT(-25.67 37.74) +PEW, Bacha Khan Int'l, 7, mid, POINT(71.5188149912667 33.9914027889596), Pakistan, Peshawar, POINT(71.5675 34.0144) +PIK, Glasgow Prestwick, 7, mid, POINT(-4.61097163901068 55.5088918105142), United Kingdom, Prestwick, POINT(-4.6142 55.4956) +PMG, Ponta Porã Int'l, 7, mid, POINT(-55.7060793748573 -22.551786560876), Brazil, Ponta Porã, POINT(-55.7258 -22.5358) +PMR, Palmerston N. 
Int'l, 7, mid, POINT(175.62128328196 -40.3233178852055), New Zealand, Palmerston North, POINT(175.6117 -40.355) +PNI, Pohnpei Int'l, 7, mid, POINT(158.203304490964 6.98130676512123), Federated States of Micronesia, Kolonia, POINT(158.2081 6.9639) +PPT, Tahiti Faa'a Int'l, 7, mid, POINT(-149.609757932429 -17.5594577659942), French Polynesia, Papeete, POINT(-149.5667 -17.5334) +PSA, Pisa Galileo Galilei Int'l, 7, [major,military], POINT(10.4001343718056 43.6983224157664), Italy, Pisa, POINT(10.4 43.7167) +PZU, Port Sudan, 7, [mid,military], POINT(37.216065757542 19.5760636531968), Sudan, Port Sudan, POINT(37.2167 19.6167) +RAI, Praia Int'l, 7, mid, POINT(-23.4862019883587 14.9449889352832), Cabo Verde, Praia, POINT(-23.509 14.918) +RAK, Marrakech-Menara, 7, mid, POINT(-8.02460535907989 31.6022946597764), Morocco, Marrakech, POINT(-8.0089 31.63) +RAR, Rarotonga Int'l, 7, mid, POINT(-159.798156308387 -21.2009821724632), Cook Islands, Avarua, POINT(-159.771 -21.207) +REP, Siem Reap Int'l, 7, major, POINT(103.815780528112 13.4087969693538), Cambodia, Siem Reap, POINT(103.8597 13.3622) +RGA, Hermes Quijada Int'l, 7, mid, POINT(-67.7530268462675 -53.7814746058316), Chile, Puerto Williams, POINT(-67.6167 -54.9333) +RGL, Piloto Civil Norberto Fernandez Int'l, 7, mid, POINT(-69.3064711776731 -51.6116980855402), Argentina, Río Gallegos, POINT(-69.2161 -51.6233) +RNO, Reno-Tahoe Int'l, 7, major, POINT(-119.775283308105 39.5058499014703), United States, Reno, POINT(-119.8483 39.5497) +ROR, Roman Tmetuchl Int'l, 7, mid, POINT(134.532953466159 7.3644955361292), Palau, Koror, POINT(134.4792 7.3419) +SID, Amilcar Cabral Int'l, 7, mid, POINT(-22.9440574079648 16.7347932693385), Cabo Verde, Espargos, POINT(-22.946 16.756) +SJJ, Sarajevo, 7, major, POINT(18.3366185457127 43.8258872246797), Bosnia and Herzegovina, Sarajevo, POINT(18.4131 43.8564) +SKB, Robert L. 
Bradshaw Int'l, 7, mid, POINT(-62.7142125047316 17.311125840442), Saint Kitts and Nevis, Basseterre, POINT(-62.7333 17.3) +SLA, Martín Miguel de Güemes Int, 7, mid, POINT(-65.4784760437796 -24.8443742713315), Argentina, Salta, POINT(-65.4167 -24.7833) +SPN, Saipan Int'l, 7, mid, POINT(145.723694658638 15.1215167197664), Northern Mariana Islands, Capitol Hill, POINT(145.7546 15.2137) +SRE, Juana Azurduy de Padilla Int'l, 7, mid, POINT(-65.2928631387847 -19.0139157924657), Bolivia, Tarabuco, POINT(-64.9167 -19.1667) +SXM, Princess Juliana Int'l, 7, major, POINT(-63.1122760858602 18.042244021474), Sint Maarten, Philipsburg, POINT(-63.0458 18.0237) +TAI, Ta'izz Int'l, 7, mid, POINT(44.134782731062 13.6854970025574), Yemen, Ta‘izz, POINT(44.0219 13.5789) +TAO, Qingdao Liuting Int'l, 7, mid, POINT(120.380685949061 36.2677578081039), China, Qingdao, POINT(120.4 36.1167) +TKK, Chuuk Int'l, 7, mid, POINT(151.842046037403 7.45761780288443), Federated States of Micronesia, Weno, POINT(151.85 7.45) +TNG, Tangier Ibn Battouta, 7, mid, POINT(-5.91288087655914 35.7257656409274), Morocco, Tangier, POINT(-5.8039 35.7767) +TRW, Bonriki Int'l, 7, mid, POINT(173.145990795301 1.3806686975383), Kiribati, Tarawa, POINT(173.0176 1.3382) +TSE, Astana Int'l, 7, major, POINT(71.4609441399936 51.0269352907712), Kazakhstan, Nur-Sultan, POINT(71.4222 51.1472) +TSN, Tianjin Binhai Int'l, 7, major, POINT(117.352723159919 39.1294609909008), China, Tianjin, POINT(117.2056 39.1467) +TSV, Townsville, 7, [major,military], POINT(146.77067890477 -19.2561814376212), Australia, Townsville, POINT(146.8167 -19.25) +TUC, Teniente Gen. 
Benjamin Matienzo Int'l, 7, mid, POINT(-65.1081246236248 -26.8357310050714), Argentina, San Miguel de Tucumán, POINT(-65.2167 -26.8167) +TUN, Aeroport Tunis, 7, major, POINT(10.2176992447111 36.8474482177219), Tunisia, Tunis, POINT(10.1817 36.8064) +TUS, Tucson Int'l, 7, major, POINT(-110.937713232132 32.1203523441898), United States, Tucson, POINT(-110.8787 32.1541) +ULN, Chinggis Khaan Int'l, 7, mid, POINT(106.762873994929 47.8525260966684), Mongolia, Ulaanbaatar, POINT(106.9172 47.9203) +URC, Ürümqi Diwopu Int'l, 7, major, POINT(87.4671298487808 43.8983382193653), China, Ürümqi, POINT(87.6125 43.8225) +VLI, Bauerfield Int'l, 7, mid, POINT(168.319622739662 -17.7016990681781), Vanuatu, Port-Vila, POINT(168.3167 -17.7333) +WWK, Wewak Int'l, 7, mid, POINT(143.669102299698 -3.58022689444744), Papua New Guinea, Wewak, POINT(143.6333 -3.55) +XCR, Châlons Vatry, 7, [military,mid], POINT(4.19111982574289 48.7803946138566), France, Châlons-en-Champagne, POINT(4.365 48.9575) +XMN, Xiamen Gaoqi Int'l, 7, major, POINT(118.12696884672 24.537192570557), China, Xiamen, POINT(118.0819 24.4797) +YAP, Yap Int'l, 7, mid, POINT(138.086430283619 9.49791733361348), , , +ZLO, Playa de Oro Int'l, 7, mid, POINT(-104.560095200097 19.1480860285854), Mexico, Cihuatlán, POINT(-104.5667 19.25) +CAY, Cayenne – Rochambeau, 7, mid, POINT(-52.3638068572357 4.82126714308924), French Guiana, Cayenne, POINT(-52.33 4.933) +UIII, Irkutsk N.W., 7, mid, POINT(104.197359284494 52.3616476700131), Russia, Irkutsk, POINT(104.2833 52.2833) +SJW, Shijiazhuang Zhengding Int'l, 7, major, POINT(114.692266598902 38.278140913112), China, Shijiazhuang, POINT(114.5086 38.0422) +GYD, Heydar Aliyev Int'l, 7, major, POINT(50.0498394867405 40.462746883908), Azerbaijan, Baku, POINT(49.8352 40.3667) +LAK, Lakatamia Airbase, 7, [military,mid], POINT(33.322201334899 35.1063448067362), Cyprus, Latsia, POINT(33.3667 35.1) +CFB, Cabo Frio Int'l, 7, mid, POINT(-42.0792517520184 -22.9256317091328), Brazil, Cabo Frio, 
POINT(-42.0189 -22.8789) +HEM, Helsinki-Malmi, 7, mid, POINT(25.0455353698315 60.2493778499587), Finland, Helsinki, POINT(24.9375 60.1708) +LUX, Luxembourg-Findel, 7, major, POINT(6.21642121728731 49.6343040925102), Luxembourg, Luxembourg, POINT(6.1319 49.6117) +VCE, Venice Marco Polo, 7, major, POINT(12.3410673004369 45.5048477588455), Italy, Mestre, POINT(12.2381 45.4906) +YNY, Yangyang Int'l, 7, mid, POINT(128.66298866884 38.0587824162585), South Korea, Gangneung, POINT(128.9 37.75) +TBT, Tabatinga Int'l, 7, mid, POINT(-69.939473933909 -4.25032469493379), Colombia, Leticia, POINT(-69.9333 -4.2167) +BVB, Boa Vista Int'l, 7, mid, POINT(-60.6922206338682 2.84119534121157), Brazil, Boa Vista, POINT(-60.6714 2.8194) +LPA, Gran Canaria, 7, major, POINT(-15.3899245158461 27.9368899716574), Spain, Las Palmas, POINT(-15.4314 28.1272) +ING, Com. Armando Tola Int'l, 7, mid, POINT(-72.0538569101296 -50.2839008690038), Argentina, El Calafate, POINT(-72.2833 -50.3333) +NYO, Stockholm-Skavsta, 7, mid, POINT(16.9216055584254 58.7851041303448), Sweden, Nyköping, POINT(17.0086 58.7531) +MES, Polonia Int'l, 7, mid, POINT(98.6761925714641 3.56659179990894), Indonesia, Medan, POINT(98.6739 3.5894) +BGF, Bangui M'Poko Int'l, 7, mid, POINT(18.524123630208 4.39885153695957), Central African Republic, Bimbo, POINT(18.5163 4.3313) +HGH, Hangzhou Xiaoshan Int'l, 7, major, POINT(120.432097376313 30.2351862790414), China, Hangzhou, POINT(120.1675 30.25) +CXI, Cassidy Int'l, 7, mid, POINT(-157.34977789343 1.98616119792402), , , +SQQ, Šiauliai Int'l, 7, mid, POINT(23.3831885738691 55.90376945404), Lithuania, Šiauliai, POINT(23.3167 55.9333) +IUE, Niue Int'l, 7, mid, POINT(-169.926129774217 -19.0767129354511), Niue, Alofi, POINT(-169.921 -19.056) +AGT, Guaraní Int'l, 7, mid, POINT(-54.8393995296062 -25.4568570715812), Paraguay, Ciudad del Este, POINT(-54.6167 -25.5167) +AQP, Rodríguez Ballón Int'l, 7, mid, POINT(-71.5679335385285 -16.344552065352), Peru, Arequipa, POINT(-71.5333 -16.4) +VVO, 
Vladivostok Int'l, 7, [mid,military], POINT(132.139841720715 43.3776492533885), Russia, Vladivostok, POINT(131.9 43.1333) +PRN, Pristina, 7, major, POINT(21.0302690124746 42.5850331153448), Kosovo, Pristina, POINT(21.1622 42.6633) +ANR, Deurne, 6, mid, POINT(4.45092277399909 51.1891285063806), Belgium, Antwerp, POINT(4.4003 51.2178) +LAP, Gen. Márquez de León Int'l, 6, mid, POINT(-110.367197859809 24.0760903521803), Mexico, Los Mochis, POINT(-108.9937 25.7835) +HRB, Harbin Taiping, 6, major, POINT(126.236983030863 45.6206011723245), China, Harbin, POINT(126.6333 45.75) +TRV, Trivandrum Int'l, 6, mid, POINT(76.9189025612913 8.47650993894514), India, Thiruvananthapuram, POINT(76.9525 8.4875) +ADB, Adnan Menderes, 6, major, POINT(27.1492975952664 38.2912347645175), Turkey, İzmir, POINT(27.14 38.42) +NKG, Nanjing Lukou Int'l, 6, major, POINT(118.866102146906 31.7353249296177), China, Nanjing, POINT(118.7789 32.0608) +FPO, Freeport Int'l, 6, mid, POINT(-78.7039343114497 26.548246747189), The Bahamas, Freeport City, POINT(-78.6967 26.5286) +TIP, Tripoli Int'l, 6, major, POINT(13.1442589810713 32.6691695504993), Libya, Az Zāwīyah, POINT(12.7278 32.7522) +YQX, Gander Int'l, 6, mid, POINT(-54.5755719093578 48.9465980060736), Canada, Gander, POINT(-54.6089 48.9569) +DOH, Doha Int'l, 6, [major,military], POINT(51.5585487876547 25.2682461310506), Qatar, Doha, POINT(51.5333 25.2867) +ABQ, Albuquerque Int'l, 6, major, POINT(-106.6166851616 35.0491578018276), United States, Albuquerque, POINT(-106.6465 35.1054) +ANU, V.C. 
Bird Int'l, 6, mid, POINT(-61.7923676698358 17.1403599371617), Antigua and Barbuda, Saint John’s, POINT(-61.85 17.1167) +APW, Faleolo, 6, mid, POINT(-171.99732221834 -13.8325013323956), Samoa, Apia, POINT(-171.75 -13.8333) +ATZ, Asyut, 6, mid, POINT(31.0162490438011 27.0508158406978), Egypt, Asyūţ, POINT(31.1667 27.1833) +BAH, Bahrain Int'l, 6, major, POINT(50.6260028757534 26.2696971499497), Bahrain, Manama, POINT(50.5775 26.225) +BDL, Bradley Int'l, 6, major, POINT(-72.685394743339 41.9303160058352), United States, Windsor Locks, POINT(-72.6544 41.9267) +BGI, Grantley Adams Int'l, 6, mid, POINT(-59.4874188953158 13.079661104553), Martinique, Fort-de-France, POINT(-61.0667 14.6) +BJL, Yundum Int'l, 6, mid, POINT(-16.6523132698075 13.3438604788942), The Gambia, Serekunda, POINT(-16.6667 13.4333) +BJM, Bujumbura Int'l, 6, mid, POINT(29.3209840169939 -3.32204434913113), Burundi, Bujumbura, POINT(29.3667 -3.3833) +BLZ, Chileka Int'l, 6, mid, POINT(34.9719441837933 -15.6813844793272), Malawi, Blantyre, POINT(35.0058 -15.7861) +BME, Broome Int'l, 6, mid, POINT(122.233850515022 -17.952576129268), Australia, Broome, POINT(122.2361 -17.9619) +BND, Bandar Abbass Int'l, 6, mid, POINT(56.368886456411 27.2103258455145), Iran, Bandar ‘Abbās, POINT(56.2667 27.1833) +BSR, Basrah Int'l, 6, major, POINT(47.6683766633518 30.552799016106), Iraq, Al Başrah, POINT(47.81 30.515) +CJS, Ciudad Juarez Int'l, 6, mid, POINT(-106.435846631055 31.6357566201951), Mexico, Juárez, POINT(-106.487 31.7386) +CMB, Katunayake Int'l, 6, major, POINT(79.8852573421506 7.17807710544221), Sri Lanka, Negombo, POINT(79.8386 7.2111) +CNS, Cairns Int'l, 6, mid, POINT(145.7535848444 -16.8767421554062), Australia, Cairns, POINT(145.78 -16.92) +CNX, Chiang Mai Int'l, 6, major, POINT(98.9681181241593 18.7688473919675), Thailand, Chiang Mai, POINT(98.9986 18.7953) +COS, City of Colorado Springs, 6, major, POINT(-104.700880274111 38.7974248779125), United States, Colorado Springs, POINT(-104.7605 38.8674) +CPE, Ign. 
Alberto Ongay Int'l, 6, mid, POINT(-90.5036283734038 19.8142247992074), Mexico, Campeche, POINT(-90.5306 19.85) +CSX, Changsha Huanghua Int'l, 6, major, POINT(113.214054203252 28.1899218619451), China, Zhuzhou, POINT(113.1469 27.8407) +CVG, Greater Cincinnati Int'l, 6, major, POINT(-84.6561699153392 39.055418904783), United States, Cincinnati, POINT(-84.506 39.1413) +DAD, Da Nang, 6, major, POINT(108.202706257936 16.053144145167), Vietnam, Quảng Hà, POINT(108.2667 15.9333) +DAL, Dallas Love Field, 6, major, POINT(-96.84986377098 32.8444253732738), United States, Irving, POINT(-96.9702 32.8583) +DAM, Damascus Int'l, 6, major, POINT(36.5128954718126 33.4114366702732), Syria, Qabr as Sitt, POINT(36.3361 33.4472) +DAV, Enrique Malek Int'l, 6, mid, POINT(-82.4317583369387 8.39126106116917), Panama, David, POINT(-82.4333 8.4333) +DIR, Aba Tenna D. Yilma Int'l, 6, mid, POINT(41.857756722253 9.61267784753569), Ethiopia, Dire Dawa, POINT(41.8667 9.6) +DPS, Bali Int'l, 6, major, POINT(115.162322961107 -8.74475731595652), Indonesia, Denpasar, POINT(115.2167 -8.65) +DSM, Des Moines Int'l, 6, major, POINT(-93.6484612563736 41.5327904242113), United States, West Des Moines, POINT(-93.7806 41.5521) +EBB, Entebbe Int'l, 6, mid, POINT(32.4427573135214 0.044940949388672), Uganda, Kampala, POINT(32.5811 0.3136) +FKI, Kisangani Bangoka Int'l, 6, mid, POINT(25.3302714896212 0.492225136917501), Congo (Kinshasa), Kisangani, POINT(25.1911 0.5153) +FOC, Fuzhou Changle Int'l, 6, mid, POINT(119.668043820999 25.9318233148143), China, Fuzhou, POINT(119.2917 26.0769) +GAU, Lokpriya G. 
Bordoloi Int'l, 6, mid, POINT(91.588229058187 26.1052475924255), India, Guwāhāti, POINT(91.7458 26.1722) +GDN, Gdansk Lech Walesa, 6, major, POINT(18.4684422165911 54.3807025352925), Poland, Gdańsk, POINT(18.6453 54.3475) +GND, Point Salines Int'l, 6, mid, POINT(-61.7858529909285 12.0072683054283), Grenada, Saint David’s, POINT(-61.6806 12.0444) +GOJ, Nizhny Novgorod Int'l, 6, mid, POINT(43.7896337062935 56.2185525910656), Russia, Nizhniy Novgorod, POINT(44.0075 56.3269) +GYM, Gen. José M. Yáñez Int'l, 6, mid, POINT(-110.921651270402 27.9694553962829), Mexico, Heroica Guaymas, POINT(-110.8989 27.9183) +HET, Hohhot Baita Int'l, 6, mid, POINT(111.814681821626 40.8540600906552), China, Hohhot, POINT(111.6629 40.8151) +HLN, Helena Reg., 6, mid, POINT(-111.989896896008 46.6102043529), United States, Helena Valley Southeast, POINT(-111.8973 46.6219) +HMO, Gen. Ignacio P. Garcia Int'l, 6, mid, POINT(-111.051901711819 29.0900772523445), Mexico, Hermosillo, POINT(-110.9542 29.0989) +IAD, Dulles Int'l, 6, major, POINT(-77.4477925769206 38.952774037953), United States, Centreville, POINT(-77.4389 38.839) +ITO, Hilo Int'l, 6, mid, POINT(-155.039629733435 19.7147976868663), United States, Hilo, POINT(-155.0863 19.6883) +JAN, Jackson Int'l, 6, major, POINT(-90.0750986276924 32.3100600273635), United States, Pearl, POINT(-90.0918 32.273) +JAX, Jacksonville Int'l, 6, major, POINT(-81.6835767278311 30.491352730948), United States, Fruit Cove, POINT(-81.6175 30.0972) +KCH, Kuching Int'l, 6, mid, POINT(110.341837054315 1.4872079377901), Malaysia, Kuching, POINT(110.3439 1.5575) +KGL, Kigali Int'l, 6, mid, POINT(30.1348768187856 -1.96365443664138), Rwanda, Kigali, POINT(30.0606 -1.9536) +KRK, Kraków-Balice, 6, major, POINT(19.8009772844504 50.0722630648331), Poland, Kraków, POINT(19.9372 50.0614) +KUF, Kurumoch, 6, major, POINT(50.1472655210191 53.5083848190935), Russia, Samara, POINT(50.1408 53.2028) +KWL, Guilin Liangjiang Int'l, 6, major, POINT(110.04689349777 25.2176055252293), 
China, Guilin, POINT(110.2864 25.2819) +LAO, Laoag Int'l, 6, mid, POINT(120.533876196127 18.1824180866379), Philippines, Laoag, POINT(120.5936 18.1978) +LGA, LaGuardia, 6, major, POINT(-73.8719858204814 40.7745539398858), United States, New York, POINT(-73.9249 40.6943) +LGW, London Gatwick, 6, major, POINT(-0.162961639139456 51.1557567519275), United Kingdom, Crawley, POINT(-0.1872 51.1092) +LJU, Ljubljana, 6, major, POINT(14.4548126283266 46.2305445554486), Slovenia, Ljubljana, POINT(14.5061 46.0514) +LKO, Amausi Int'l, 6, mid, POINT(80.8841719732472 26.7639328700916), India, Lucknow, POINT(80.95 26.85) +LPG, La Plata, 6, mid, POINT(-57.895382063651 -34.9655441559234), Argentina, Berisso, POINT(-57.8858 -34.8728) +MAM, Gen. Sevando Canales, 6, mid, POINT(-97.5308217121187 25.7708412640619), Mexico, Heroica Matamoros, POINT(-97.5042 25.8797) +MAN, Manchester Int'l, 6, major, POINT(-2.27337159069427 53.3624896066518), United Kingdom, Wythenshawe, POINT(-2.264 53.392) +MCI, Kansas City Int'l, 6, major, POINT(-94.7159148579154 39.2978958263659), United States, Kansas City, POINT(-94.7443 39.1235) +MCT, Seeb Int'l, 6, major, POINT(58.2904804753493 23.5885704175856), Oman, Muscat, POINT(58.5922 23.6139) +MIR, Habib Bourguiba Int'l, 6, mid, POINT(10.753368185054 35.760710442178), Tunisia, Sousse, POINT(10.6333 35.8333) +MRS, Marseille Provence Airport, 6, major, POINT(5.22137917720337 43.4410600016468), France, Marseille, POINT(5.37 43.2964) +NLD, Quetzalcoatl Int'l, 6, mid, POINT(-99.5680081930063 27.4496896508316), Mexico, Nuevo Laredo, POINT(-99.5069 27.4861) +NNG, Nanning Wuwu Int'l, 6, major, POINT(108.168012273331 22.6120370541785), China, Nanning, POINT(108.315 22.8192) +OAX, Xoxocotlán Int'l, 6, mid, POINT(-96.7217959384975 17.0005592569745), Mexico, Oaxaca, POINT(-96.7253 17.0606) +OGG, Kahului, 6, mid, POINT(-156.437429581353 20.8932885151112), United States, Kahului, POINT(-156.4603 20.8715) +OKC, Will Rogers, 6, major, POINT(-97.5961177542092 
35.3952774911744), United States, Oklahoma City, POINT(-97.5136 35.4676) +ORF, Norfolk Int'l, 6, major, POINT(-76.2044231712327 36.8982394673674), United States, Virginia Beach, POINT(-76.0435 36.7335) +PBI, Palm Beach Int'l, 6, major, POINT(-80.0901893383387 26.688441666433), United States, West Palm Beach, POINT(-80.132 26.7469) +PBM, Pengel Int'l, 6, mid, POINT(-55.1999113892902 5.45599967797439), Suriname, Paramaribo, POINT(-55.2039 5.8522) +PEE, Bolshesavino, 6, mid, POINT(56.0195602820297 57.9197711231691), Russia, Perm, POINT(56.2489 58.0139) +PEN, Penang Int'l, 6, mid, POINT(100.265786380955 5.29265627790489), Malaysia, George Town, POINT(100.3292 5.4144) +PHC, Port Harcourt Int'l, 6, mid, POINT(6.94989742723191 5.00700347673943), Nigeria, Port Harcourt, POINT(7.0336 4.8242) +PHE, Port Hedland Int'l, 6, mid, POINT(118.631797815615 -20.3781272960723), Australia, Port Hedland, POINT(118.6011 -20.31) +PIR, Pierre Regional, 6, mid, POINT(-100.292641981705 44.3801534668762), United States, Pierre, POINT(-100.3205 44.3748) +PIT, Greater Pittsburgh Int'l, 6, major, POINT(-80.2561290571918 40.4960518915285), United States, Pittsburgh, POINT(-79.9763 40.4397) +PPG, Pago Pago Int'l, 6, mid, POINT(-170.713307053734 -14.3290641850306), American Samoa, Pago Pago, POINT(-170.7046 -14.274) +BHX, Birmingham Int'l, 6, major, POINT(-1.73373170434452 52.4529085542838), United Kingdom, Solihull, POINT(-1.778 52.413) +ROB, Roberts Int'l, 6, mid, POINT(-10.3530851867934 6.24183456554525), Liberia, Harbel, POINT(-10.35 6.2833) +RPR, Raipur, 6, mid, POINT(81.7403775915201 21.1859868561447), India, Bhilai, POINT(81.38 21.21) +SAL, El Salvador Int'l, 6, mid, POINT(-89.0572035692743 13.4447481228616), El Salvador, Santa Tecla, POINT(-89.2406 13.6731) +SAN, San Diego Int'l, 6, major, POINT(-117.197511025731 32.7322645570132), Mexico, Tijuana, POINT(-117.0333 32.525) +SAT, San Antonio Int'l, 6, major, POINT(-98.4719699991559 29.5266203391315), United States, New Braunfels, 
POINT(-98.1148 29.6994) +SAV, Savannah Int'l, 6, major, POINT(-81.2099647750913 32.1356415522902), United States, Savannah, POINT(-81.1821 32.0286) +SCU, Antonio Maceo, 6, mid, POINT(-75.8398877639791 19.9724288717622), Cuba, Santiago de Cuba, POINT(-75.8294 20.0217) +SLP, Ponciano Arriaga Int'l, 6, mid, POINT(-100.936477816267 22.2557130495903), Mexico, San Luis Potosí, POINT(-100.9761 22.1511) +SMF, Sacramento Int'l, 6, major, POINT(-121.587894877723 38.6927238925554), United States, Elk Grove, POINT(-121.3842 38.4161) +STI, Cibao Int'l, 6, mid, POINT(-70.6941783224468 19.4659219152888), Dominican Republic, Gurabo al Medio, POINT(-70.6727 19.4739) +SVX, Koltsovo, 6, major, POINT(60.8058033432174 56.732245612046), Russia, Yekaterinburg, POINT(60.6128 56.8356) +SYR, Syracuse Hancock Int'l, 6, major, POINT(-76.1130789991049 43.1317844943741), United States, Cicero, POINT(-76.0662 43.1662) +TBZ, Tabriz, 6, mid, POINT(46.244713373574 38.1311107688175), Iran, Tabrīz, POINT(46.3006 38.0814) +TRC, Torreon Int'l, 6, mid, POINT(-103.398787828579 25.5632164399896), Mexico, Torreón, POINT(-103.4486 25.5394) +TUL, Tulsa Int'l, 6, major, POINT(-95.889882271542 36.190127565195), United States, Tulsa, POINT(-95.9042 36.1283) +TYS, Mcghee Tyson, 6, major, POINT(-83.9899378327585 35.8057448027088), United States, Knoxville, POINT(-83.9496 35.9692) +UFA, Ufa Int'l, 6, major, POINT(55.8840773411837 54.5651323578972), Russia, Ufa, POINT(55.9475 54.7261) +UVF, Hewanorra Int'l, 6, mid, POINT(-60.9499737723461 13.7365238050489), Saint Lucia, Vieux Fort, POINT(-60.954 13.728) +WDH, Windhoek Hosea Kutako Int'l, 6, mid, POINT(17.4632259028133 -22.4869531202041), Namibia, Windhoek, POINT(17.0836 -22.57) +YAM, Sault Ste Marie, 6, mid, POINT(-84.5006089999717 46.4854175101926), United States, Sault Ste. 
Marie, POINT(-84.3723 46.4817) +YDQ, Dawson Cr., 6, mid, POINT(-120.185595619101 55.7394117074557), Canada, Dawson Creek, POINT(-120.2356 55.7606) +YEG, Edmonton Int'l, 6, major, POINT(-113.584492564406 53.3072001619183), Canada, Leduc, POINT(-113.5492 53.2594) +YHZ, Halifax Int'l, 6, major, POINT(-63.5149652501673 44.886545450101), Canada, Moncton, POINT(-64.7714 46.1328) +YKA, Kamloops, 6, mid, POINT(-120.441734763962 50.7051955184591), Canada, Kamloops, POINT(-120.3408 50.6761) +YSB, Sudbury, 6, mid, POINT(-80.7957747817105 46.6227508204893), Canada, North Bay, POINT(-79.45 46.3) +YSJ, Saint John, 6, mid, POINT(-65.8905573681168 45.3292305955017), Canada, Saint John, POINT(-66.0761 45.2806) +YXS, Prince George, 6, mid, POINT(-122.674014743986 53.8842485751138), Canada, Prince George, POINT(-122.7494 53.9169) +YYJ, Victoria Int'l, 6, major, POINT(-123.430624539528 48.640529482179), Canada, Saanich, POINT(-123.381 48.484) +ZAM, Zamboanga Int'l, 6, mid, POINT(122.062432321637 6.9197577480583), Philippines, Zamboanga City, POINT(122.0761 6.9042) +ZGC, Lanzhou Zhongchuan, 6, mid, POINT(103.615415363043 36.5078842461237), China, Lanzhou, POINT(103.8318 36.0617) +ALB, Albany Int'l, 6, mid, POINT(-73.8093518843173 42.7456619801729), United States, Colonie, POINT(-73.7874 42.7396) +MKE, General Mitchell Int'l, 6, major, POINT(-87.9021056250744 42.9479198729586), United States, Milwaukee, POINT(-87.9675 43.0642) +ZHHH, Wang-Chia Tun Airbase, 6, [military,mid], POINT(114.24694737615 30.6017141196702), China, Wuhan, POINT(114.2881 30.5872) +SYX, Sanya Phoenix Int'l, 6, major, POINT(109.40823949108 18.3090959908593), China, Sanya, POINT(109.5036 18.2533) +LXA, Lhasa Gonggar, 6, mid, POINT(90.9005610194027 29.2936936123184), China, Lhasa, POINT(91.1719 29.6534) +HTN, Hotan, 6, mid, POINT(79.8723005212191 37.0400363509765), China, Hotan, POINT(80.0167 37.1) +DRS, Dresden, 6, major, POINT(13.7649671440047 51.1250912428871), Germany, Dresden, POINT(13.74 51.05) +NNA, Kenitra Air 
Base, 6, [military,major], POINT(-6.597753628116 34.2986673638223), Morocco, Kenitra, POINT(-6.5833 34.25) +QNJ, Annemasse, 6, mid, POINT(6.26491085364159 46.1957283286261), France, Annemasse, POINT(6.2364 46.1958) +NOG, Nogales Int'l, 6, mid, POINT(-110.972721301675 31.2255371741159), Mexico, Heroica Nogales, POINT(-110.9458 31.3186) +SXB, Strasbourg, 6, mid, POINT(7.62784196688924 48.5446961721759), France, Strasbourg, POINT(7.7458 48.5833) +CGN, Cologne/Bonn, 6, major, POINT(7.12235975524539 50.8782596629471), Germany, Cologne, POINT(6.9528 50.9364) +PUS, Kimhae Int'l, 6, major, POINT(128.948801379039 35.1702840636829), South Korea, Busan, POINT(129.075 35.18) +CJU, Jeju Int'l, 6, major, POINT(126.491629401972 33.5247173150399), South Korea, Jeju, POINT(126.5219 33.5097) +SVG, Stavanger Sola, 6, major, POINT(5.6298103297218 58.8821564842185), Norway, Sandnes, POINT(5.7361 58.8517) +TRD, Trondheim Vaernes, 6, major, POINT(10.9168095241445 63.472029381717), Norway, Stjørdalshalsen, POINT(10.9189 63.4712) +CMG, Corumbá Int'l, 6, mid, POINT(-57.6636078925543 -19.0141662885534), Brazil, Corumbá, POINT(-57.6528 -19.0089) +FNC, Madeira, 6, mid, POINT(-16.7756374531213 32.6933642847489), Portugal, Machico, POINT(-16.7667 32.7) +IGU, Foz do Iguaçu Int'l, 6, mid, POINT(-54.4885922735633 -25.5976832162102), Brazil, Foz do Iguaçu, POINT(-54.5875 -25.54) +PVH, Gov. 
Jorge Teixeira de Oliveira Int'l, 6, mid, POINT(-63.8984625004213 -8.71442482859288), Brazil, Porto Velho, POINT(-63.9039 -8.7619) +BIO, Bilbao, 6, mid, POINT(-2.90609011679805 43.3050829811195), Spain, Bilbao, POINT(-2.9236 43.2569) +PMI, Palma de Mallorca, 6, major, POINT(2.72997660200647 39.5657758586254), Spain, Marratxi, POINT(2.7527 39.6421) +TFN, Tenerife N., 6, major, POINT(-16.3463175679264 28.4875770267731), Spain, La Laguna, POINT(-16.3167 28.4853) +GOT, Gothenburg, 6, major, POINT(12.2938269092573 57.6857493534879), Sweden, Gothenburg, POINT(11.9675 57.7075) +LLA, Lulea, 6, major, POINT(22.1230271243945 65.5490362477616), Sweden, Luleå, POINT(22.1539 65.5844) +AUH, Abu Dhabi Int'l, 6, major, POINT(54.6463293225558 24.4272271529764), United Arab Emirates, Abu Dhabi, POINT(54.3667 24.4667) +CZL, Mohamed Boudiaf Int'l, 6, mid, POINT(6.62194665181219 36.2834409441601), Algeria, Constantine, POINT(6.6147 36.365) +ASW, Aswan Int'l, 6, mid, POINT(32.8244372462973 23.9682765441778), Egypt, Aswān, POINT(32.8997 24.0889) +RVN, Rovaniemi, 6, mid, POINT(25.8294409760452 66.5595564168509), Finland, Rovaniemi, POINT(25.7333 66.5) +GEO, Cheddi Jagan Int'l, 6, mid, POINT(-58.2541191925889 6.49855290813572), Guyana, Bartica, POINT(-58.6167 6.4) +COK, Cochin Int'l, 6, major, POINT(76.3905198502024 10.1551187628118), India, Kochi, POINT(76.28 9.97) +EDL, Eldoret Int'l, 6, mid, POINT(35.2236930658301 0.40507147546036), Kenya, Eldoret, POINT(35.2833 0.5167) +ICN, Incheon Int'l, 6, major, POINT(126.450875980796 37.4492088624346), South Korea, Incheon, POINT(126.6333 37.4833) +CUL, Federal de Bachigualato Int'l, 6, mid, POINT(-107.469863792896 24.7668040390461), Mexico, Culiacán, POINT(-107.3939 24.8069) +ISB, Benazir Bhutto Int'l, 6, [major,military], POINT(73.1007936471882 33.6074457507526), Pakistan, Rawalpindi, POINT(73.0333 33.6) +BRU, Brussels, 5, major, POINT(4.48464032408272 50.8972949641511), Belgium, Brussels, POINT(4.3525 50.8467) +ABV, Abuja Int'l, 5, major, 
POINT(7.27025993974356 9.00437659781094), Nigeria, Abuja, POINT(7.4833 9.0667) +ACV, Arcata-Eureka, 5, mid, POINT(-124.107065520139 40.9719245381314), United States, McKinleyville, POINT(-124.0857 40.9488) +AUS, Austin-Bergstrom Int'l, 5, major, POINT(-97.6668367646054 30.2021081920749), United States, Round Rock, POINT(-97.6642 30.527) +AYT, Antalya, 5, major, POINT(30.8025526439415 36.9153233051868), Turkey, Antalya, POINT(30.7075 36.8874) +BFS, Belfast Int'l, 5, major, POINT(-6.21616943734958 54.6615575470103), United Kingdom, Belfast, POINT(-5.93 54.5964) +BGY, Orio Al Serio, 5, major, POINT(9.6989176939974 45.6654980560695), Italy, Bergamo, POINT(9.67 45.695) +BKI, Kota Kinabalu Int'l, 5, mid, POINT(116.051087873369 5.92289445474807), Malaysia, Kota Kinabalu, POINT(116.0725 5.975) +BLR, Bengaluru Int'l, 5, major, POINT(77.7095579889575 13.2006108069609), India, Bangalore, POINT(77.5917 12.9789) +CBR, Canberra Int'l, 5, major, POINT(149.190760539671 -35.3071855902909), Australia, Canberra, POINT(149.1269 -35.2931) +CMH, Port Columbus Int'l, 5, major, POINT(-82.8840306426634 39.9981181922432), United States, Gahanna, POINT(-82.8637 40.0251) +CMN, Mohamed V Int'l, 5, major, POINT(-7.5814559902572 33.3747274815396), Morocco, Mediouna, POINT(-7.51 33.45) +DUS, Düsseldorf Int'l, 5, major, POINT(6.76494446612174 51.2781820420774), Germany, Düsseldorf, POINT(6.7833 51.2333) +ESB, Esenboğa Int'l, 5, major, POINT(32.9930100772014 40.1151278273234), Turkey, Ankara, POINT(32.85 39.93) +HLZ, Hamilton Int'l, 5, mid, POINT(175.336221432708 -37.8658411484827), New Zealand, Te Awamutu, POINT(175.3167 -38.0167) +HYD, Rajiv Gandhi Int'l, 5, major, POINT(78.42953613452 17.2359831507471), India, Hyderābād, POINT(78.4867 17.385) +JFK, John F Kennedy Int'l, 5, major, POINT(-73.7863268609295 40.6459595584081), United States, New York, POINT(-73.9249 40.6943) +KBP, Boryspil Int'l, 5, major, POINT(30.8951621615528 50.340902338877), Ukraine, Boryspil, POINT(30.95 50.35) +KRT, Khartoum, 
5, major, POINT(32.550153296633 15.5922226530858), Sudan, Khartoum, POINT(32.56 15.5006) +MSN, Dane Cty. Reg. (Truax Field), 5, major, POINT(-89.3457847894487 43.1363082385868), United States, Sun Prairie, POINT(-89.2362 43.1825) +MSQ, Minsk Int'l, 5, major, POINT(28.0341933346378 53.8893792398005), Belarus, Minsk, POINT(27.5667 53.9) +PMO, Palermo, 5, major, POINT(13.1055309888638 38.1863351084895), Italy, Palermo, POINT(13.3613 38.1157) +PVD, T.F. Green, 5, mid, POINT(-71.4357841445789 41.7260019847189), United States, Providence, POINT(-71.4187 41.823) +RSW, Southwest Florida Int'l, 5, major, POINT(-81.7551231409306 26.5279288067651), United States, Cape Coral, POINT(-81.9957 26.6443) +SHE, Shenyang Taoxian Int'l, 5, major, POINT(123.487974430338 41.6347891339582), China, Shenyang, POINT(123.4281 41.8025) +SHJ, Sharjah Int'l, 5, major, POINT(55.5205071948853 25.3211964019068), United Arab Emirates, Dubai, POINT(55.2972 25.2631) +SJC, San Jose Int'l, 5, major, POINT(-121.929428983532 37.3694905908965), United States, Sunnyvale, POINT(-122.0255 37.3836) +SNA, John Wayne, 5, major, POINT(-117.861489220393 33.6794857329549), United States, Mission Viejo, POINT(-117.6551 33.6096) +STR, Stuttgart, 5, major, POINT(9.19395108945536 48.6901051358913), Germany, Stuttgart, POINT(9.18 48.7775) +SYQ, Nacional Tobías Bolaños, 5, mid, POINT(-84.1386091971594 9.95827851919623), Costa Rica, La Uruca, POINT(-84.1327 9.9575) +SZX, Shenzhen Bao'an Int'l, 5, major, POINT(113.815852751085 22.6465077147868), China, Shenzhen, POINT(114.054 22.535) +SDF, Louisville Int'l, 5, major, POINT(-85.7417027597367 38.1860207152699), United States, Jeffersonville, POINT(-85.7026 38.3376) +GVA, Geneva, 5, major, POINT(6.10794577423603 46.231009510158), Switzerland, Le Grand-Saconnex, POINT(6.1167 46.2333) +LYS, Lyon-Saint Exupery, 5, mid, POINT(5.07594431813459 45.7210186834669), France, Lyon, POINT(4.84 45.76) +KIX, Kansai Int'l, 5, major, POINT(135.244459772476 34.4347941629269), Japan, Ōsaka, 
POINT(135.5022 34.6939) +LIS, Lisbon Portela, 5, major, POINT(-9.13069440931071 38.7707623427514), Portugal, Loures, POINT(-9.1667 38.8333) +CNF, Tancredo Neves Int'l, 5, major, POINT(-43.9635815209949 -19.6327821218747), Brazil, Belo Horizonte, POINT(-43.9333 -19.9167) +BMA, Bromma, 5, mid, POINT(17.9456175406145 59.3555902065112), Sweden, Stockholm, POINT(18.0686 59.3294) +SUB, Juanda Int'l, 5, major, POINT(112.777034594933 -7.383578985276), Indonesia, Surabaya, POINT(112.7378 -7.2458) +MDQ, Astor Piazzolla Int'l, 5, mid, POINT(-57.5816150932392 -37.9332161204482), Argentina, Mar del Plata, POINT(-57.55 -38.0) +GCM, Owen Roberts Int'l, 5, major, POINT(-81.3576706162289 19.2959107437122), , , +CGO, Zhengzhou Xinzheng Int'l, 5, major, POINT(113.841831302845 34.5263027198957), China, Zhengzhou, POINT(113.6605 34.7492) +DLC, Dalian Zhoushuizi Int'l, 5, major, POINT(121.538913780101 38.9615702300222), China, Dalian, POINT(121.6 38.9) +HER, Heraklion Int'l, 5, major, POINT(25.1740558243272 35.3369024101045), Greece, Néa Alikarnassós, POINT(25.1833 35.3167) +TBS, Tbilisi Int'l, 5, major, POINT(44.9646146141664 41.6694420187261), Georgia, Tbilisi, POINT(44.7925 41.7225) +XXC, Cascais, 5, mid, POINT(-9.35458240263928 38.7235353208323), Portugal, Sintra, POINT(-9.3883 38.7992) +KHH, Kaohsiung Int'l, 4, major, POINT(120.345156342151 22.5717061054422), Taiwan, Kaohsiung, POINT(120.2975 22.615) +SKO, Sadiq Abubakar III, 4, mid, POINT(5.20022616032651 12.9174824166181), Nigeria, Sokoto, POINT(5.2339 13.0622) +UIO, Mariscal Sucre Int'l, 4, mid, POINT(-78.4899925545701 -0.145552408466882),Ecuador, Quito, POINT(-78.5125 -0.22) +KHI, Karachi Civil, 4, mid, POINT(67.1521283592947 24.8985243689595), Pakistan, Karachi, POINT(67.01 24.86) +KIV, Kishinev S.E., 4, mid, POINT(28.9360487562255 46.9341619900391), Moldova, Chisinau, POINT(28.8353 47.0228) +LIM, Jorge Chávez, 4, major, POINT(-77.1075656931342 -12.0237161502221), Peru, Callao, POINT(-77.1333 -12.0333) +YQT, Thunder Bay Int'l, 
4, mid, POINT(-89.3121421238136 48.3718811492508), Canada, Thunder Bay, POINT(-89.2461 48.3822) +VNO, Vilnius, 4, major, POINT(25.2807164497285 54.6430549307542), Lithuania, Vilnius, POINT(25.28 54.6872) +XIY, Hsien Yang, 4, major, POINT(108.755811342151 34.4429391054422), China, Xi’an, POINT(108.9 34.2667) +NTR, Del Norte Int'l, 4, mid, POINT(-100.238394186577 25.859873767729), Mexico, Ciudad Apodaca, POINT(-100.1886 25.7817) +TBU, Fua'amotu Int'l, 4, mid, POINT(-175.135635 -21.24861), Tonga, Nuku‘alofa, POINT(-175.2 -21.1333) +IFN, Esfahan Int'l, 4, mid, POINT(51.8763916812681 32.7460805344321), Iran, Eşfahān, POINT(51.6675 32.6447) +HRE, Harare Int'l, 4, mid, POINT(31.1014 -17.9228), Zimbabwe, Harare, POINT(31.0522 -17.8292) +KWI, Kuwait Int'l, 4, major, POINT(47.9714825593316 29.2396800581583), Kuwait, Kuwait City, POINT(47.9783 29.3697) +YOW, Macdonald-Cartier Int'l, 4, major, POINT(-75.6648933870205 45.3201348196531), Canada, Gatineau, POINT(-75.65 45.4833) +KBL, Kabul Int'l, 4, mid, POINT(69.2100736270874 34.5633978864149), Afghanistan, Kabul, POINT(69.1783 34.5253) +ABJ, Abidjan Port Bouet, 4, mid, POINT(-3.93221929167636 5.2543984451492), Côte d'Ivoire, Abidjan, POINT(-4.0333 5.3167) +ACA, General Juan N Alvarez Int'l, 4, major, POINT(-99.7545085619681 16.76196735278), Mexico, Acapulco de Juárez, POINT(-99.8825 16.8636) +ACC, Kotoka Int'l, 4, major, POINT(-0.171402855660817 5.60698152381193), Ghana, Accra, POINT(-0.2 5.55) +ADD, Bole Int'l, 4, mid, POINT(38.7931904366343 8.98173027581099), Ethiopia, Addis Ababa, POINT(38.74 9.03) +ADE, Aden Int'l, 4, mid, POINT(45.030602 12.8278), Yemen, Aden, POINT(45.0333 12.8) +ADL, Adelaide Int'l, 4, mid, POINT(138.532101457699 -34.9405860275154), Australia, Adelaide, POINT(138.6 -34.9275) +ALA, Almaty Int'l, 4, major, POINT(77.0120458771175 43.3464943144793), Kazakhstan, Almaty, POINT(76.8958 43.2775) +ALG, Houari Boumediene, 4, major, POINT(3.21207353516506 36.6997206663535), Algeria, Algiers, POINT(3.0589 36.7539) 
+ALP, Aleppo Int'l, 4, major, POINT(37.2273414057828 36.1846237314314), Syria, Aleppo, POINT(37.16 36.2) +AMD, Sardar Vallabhbhai Patel Int'l, 4, mid, POINT(72.6209000884332 23.0707454635881), India, Ahmedabad, POINT(72.58 23.03) +ANF, Cerro Moreno Int'l, 4, mid, POINT(-70.4409908509407 -23.4489545248317), Chile, Antofagasta, POINT(-70.4 -23.65) +ASB, Ashkhabad Northwest, 4, mid, POINT(58.3639659208246 37.984853438957), Turkmenistan, Ashgabat, POINT(58.3833 37.95) +ASM, Yohannes Iv Int'l, 4, mid, POINT(38.9063540136321 15.2936159696499), Eritrea, Asmara, POINT(38.925 15.3228) +ASU, Silvio Pettirossi Int'l, 4, mid, POINT(-57.5139078247136 -25.2416592533816), Paraguay, Luque, POINT(-57.4872 -25.27) +BDA, Bermuda Int'l, 4, mid, POINT(-64.7027740686514 32.3591739601581), , , +BEG, Surcin, 4, major, POINT(20.2912845946621 44.8190766654433), Serbia, Surčin, POINT(20.2833 44.8) +BEY, Beirut Int'l, 4, major, POINT(35.4930853618161 33.8254400618668), Lebanon, Beirut, POINT(35.5131 33.8869) +BHO, Bairagarh, 4, mid, POINT(77.3408714713579 23.2855684869809), India, Bhopāl, POINT(77.4167 23.25) +BKO, Bamako Sénou, 4, mid, POINT(-7.94727226970801 12.5393363425867), Mali, Bamako, POINT(-7.9922 12.6458) +BNA, Nashville Int'l, 4, major, POINT(-86.6692867356375 36.1314876361697), United States, Nashville, POINT(-86.7842 36.1715) +BNE, Brisbane Int'l, 4, major, POINT(153.120256418844 -27.3853965939099), Australia, Brisbane, POINT(153.0281 -27.4678) +BOI, Boise Air Terminal, 4, major, POINT(-116.221841070549 43.5689592234704), United States, Boise, POINT(-116.2308 43.6005) +BRW, Wiley Post Will Rogers Mem., 4, mid, POINT(-156.771835 71.289299), , , +BUF, Greater Buffalo Int'l, 4, major, POINT(-78.7319965523308 42.9340337493526), United States, Cheektowaga, POINT(-78.7466 42.9082) +BUQ, Bulawayo, 4, mid, POINT(28.622552042904 -20.0155684094908), Zimbabwe, Bulawayo, POINT(28.58 -20.17) +BWN, Brunei Int'l, 4, major, POINT(114.933119029209 4.94547528227685), Brunei, Bandar Seri Begawan, 
POINT(114.9422 4.8903) +CAN, Guangzhou Baiyun Int'l, 4, major, POINT(113.297516552171 23.3891511573243), China, Guangzhou, POINT(113.26 23.13) +CCP, Carriel Sur Int'l, 4, mid, POINT(-73.0621061746214 -36.7763727437881), Chile, Talcahuano, POINT(-73.1219 -36.7167) +CCU, Netaji Subhash Chandra Bose Int'l, 4, major, POINT(88.4400010130197 22.6453893785064), India, Kolkāta, POINT(88.37 22.5675) +CGP, Chittagong, 4, mid, POINT(91.8147107162383 22.2455658585738), Bangladesh, Chattogram, POINT(91.8325 22.335) +CHC, Christchurch Int'l, 4, major, POINT(172.538675565223 -43.4885486784104), New Zealand, Rolleston, POINT(172.3833 -43.5833) +CKY, Conakry, 4, mid, POINT(-13.6210656251671 9.57418115850082), Guinea, Conakry, POINT(-13.7122 9.5092) +CLE, Hopkins Int'l, 4, major, POINT(-81.8384406064046 41.4111916124966), United States, Akron, POINT(-81.5219 41.0798) +CLO, Alfonso Bonilla Aragón Int'l, 4, mid, POINT(-76.3850714728091 3.54328635123219), Colombia, Cali, POINT(-76.5222 3.4206) +COO, Cotonou Cadjehon, 4, mid, POINT(2.3838000724352 6.3582465034691), Benin, Cotonou, POINT(2.4333 6.3667) +COR, Ingeniero Ambrosio L.V. 
Taravella Int'l, 4, mid, POINT(-64.2123157670801 -31.3156811684889), Argentina, Villa Allende, POINT(-64.3 -31.3) +CTG, Rafael Nunez, 4, mid, POINT(-75.5123349559682 10.4449381764915), Colombia, Turbaco, POINT(-75.3333 10.35) +CUN, Cancún, 4, major, POINT(-86.8744172506694 21.04019667144), Mexico, Cancún, POINT(-86.8475 21.1606) +CUU, General R F Villalobos Int'l, 4, mid, POINT(-105.969204692629 28.7039984997679), Mexico, Chihuahua, POINT(-106.0889 28.6353) +DAC, Zia Int'l Dhaka, 4, mid, POINT(90.4049241599237 23.8481243218127), Bangladesh, Dhaka, POINT(90.3889 23.7639) +DRW, Darwin Int'l, 4, [major,military], POINT(130.877501436774 -12.4080559966556), Australia, Darwin, POINT(130.8411 -12.4381) +DUR, Louis Botha, 4, mid, POINT(30.9457081940881 -29.965914250828), South Africa, Durban, POINT(31.05 -29.8833) +FBM, Lubumbashi Luano Int'l, 4, mid, POINT(27.5292 -11.5908), Congo (Kinshasa), Lubumbashi, POINT(27.4794 -11.6647) +FEZ, Saiss, 4, mid, POINT(-4.98214637678303 33.9305251844673), Morocco, Fès, POINT(-5.0033 34.0433) +FIH, Kinshasa N Djili Int'l, 4, mid, POINT(15.4465162074561 -4.38916882197582), Congo (Kinshasa), Kinshasa, POINT(15.3222 -4.325) +FNA, Freetown Lungi, 4, mid, POINT(-13.2002296786483 8.61542361726369), Sierra Leone, Port Loko, POINT(-12.7875 8.7667) +FNJ, Sunan, 4, mid, POINT(125.675321571201 39.2001771667656), North Korea, Pyongyang, POINT(125.7381 39.0194) +FRU, Vasilyevka, 4, major, POINT(74.468800339909 43.0554527233303), Kyrgyzstan, Bishkek, POINT(74.6122 42.8747) +GBE, Sir Seretse Khama Int'l, 4, mid, POINT(25.9243808264147 -24.5580718089441), Botswana, Gaborone, POINT(25.9122 -24.6581) +GDL, Don Miguel Hidalgo Int'l, 4, major, POINT(-103.300766222752 20.5246863485173), Mexico, Tlaquepaque, POINT(-103.3167 20.6167) +GLA, Glasgow Int'l, 4, major, POINT(-4.43167796995107 55.8641828570355), United Kingdom, Paisley, POINT(-4.4239 55.8456) +GUA, La Aurora, 4, mid, POINT(-90.530181111378 14.5881608290051), Guatemala, Guatemala City, POINT(-90.5252 
14.6099) +GYE, Simon Bolivar Int'l, 4, mid, POINT(-79.887009643933 -2.15833790699136), Ecuador, Guayaquil, POINT(-79.8875 -2.19) +HAN, Noi Bai, 4, major, POINT(105.803759436806 21.2145596707245), Vietnam, Hanoi, POINT(105.8542 21.0283) +HAV, José Martí Int'l, 4, major, POINT(-82.4074206289499 22.9973533364428), Cuba, Havana, POINT(-82.3589 23.1367) +HBE, Borg El Arab Int'l, 4, mid, POINT(29.69266601523 30.9183712786239), Egypt, Al ‘Ajamī, POINT(29.7604 31.0959) +JED, King Abdul Aziz Int'l, 4, major, POINT(39.1504996780448 21.6706857878128), Saudi Arabia, Jeddah, POINT(39.1728 21.5433) +KAN, Kano Mallam Aminu Int'l, 4, mid, POINT(8.52213718395767 12.0457071601746), Nigeria, Kano, POINT(8.5167 12.0) +KHG, Kashi, 4, mid, POINT(76.0130148060075 39.5379686306258), China, Kashgar, POINT(75.9938 39.4681) +KIN, Norman Manley Int'l, 4, major, POINT(-76.7786897616576 17.9375751552752), Jamaica, Portmore, POINT(-76.8799 17.95) +KTM, Tribhuvan Int'l, 4, mid, POINT(85.357139531668 27.7002816751609), Nepal, Kathmandu, POINT(85.324 27.7172) +LAD, Luanda 4 de Fevereiro, 4, mid, POINT(13.2347957502699 -8.84831327917379), Angola, Luanda, POINT(13.2344 -8.8383) +LED, Pulkovo 2, 4, major, POINT(30.3070976454648 59.8054061601897), Russia, Saint Petersburg, POINT(30.3167 59.95) +LHE, Allama Iqbal Int'l, 4, mid, POINT(74.4108810181748 31.5206296518206), Pakistan, Lahore, POINT(74.3436 31.5497) +LLW, Kamuzu Int'l, 4, mid, POINT(33.7827885019788 -13.788622823746), Malawi, Lilongwe, POINT(33.7833 -13.9833) +LOS, Lagos Murtala Muhammed, 4, major, POINT(3.32112435281334 6.57825944540467), Nigeria, Ikeja, POINT(3.3426 6.6186) +LPB, El Alto Int'l, 4, mid, POINT(-68.1780055277945 -16.5098792213977), Bolivia, El Alto, POINT(-68.1633 -16.5047) +LUN, Lusaka Int'l, 4, mid, POINT(28.4455443211019 -15.3268522509447), Zambia, Lusaka, POINT(28.2833 -15.4167) +LXR, Luxor, 4, mid, POINT(32.7032970848623 25.6730347786023), Egypt, Luxor, POINT(32.65 25.6833) +MAA, Chennai Int'l, 4, major, 
POINT(80.1637759731545 12.9825301669154), India, Chennai, POINT(80.275 13.0825) +MAR, La Chinita Int'l, 4, mid, POINT(-71.7237688094687 10.5557594684972), Venezuela, Maracaibo, POINT(-71.6333 10.6333) +MDE, José María Córdova, 4, mid, POINT(-75.4269557399772 6.171001614358), Colombia, Medellín, POINT(-75.5906 6.2308) +MEM, Memphis Int'l, 4, major, POINT(-89.9816280353237 35.0444101240089), United States, Southaven, POINT(-89.9786 34.9514) +MGA, Augusto Cesar Sandino Int'l, 4, mid, POINT(-86.1712846229543 12.144635873435), Nicaragua, Managua, POINT(-86.2738 12.1544) +MHD, Mashhad, 4, major, POINT(59.6421943574029 36.2275503134984), Iran, Mashhad, POINT(59.6 36.3) +MIA, Miami Int'l, 4, major, POINT(-80.2789718277441 25.7949407212406), United States, Hialeah, POINT(-80.3045 25.8696) +MID, Lic M Crecencio Rejon Int'l, 4, mid, POINT(-89.6630235736434 20.9338603864296), Mexico, Kanasín, POINT(-89.5578 20.9344) +MLA, Luqa, 4, major, POINT(14.4952644555055 35.8489307943501), Malta, Valletta, POINT(14.5125 35.8983) +MBA, Moi Int'l, 4, major, POINT(39.6026631870383 -4.03265262579657), Kenya, Mombasa, POINT(39.6667 -4.05) +MSU, Moshoeshoe I Int'l, 4, mid, POINT(27.5592160333614 -29.4555740046101), Lesotho, Maseru, POINT(27.48 -29.31) +MSY, New Orleans Int'l, 4, major, POINT(-90.2566939480594 29.9851141460622), United States, New Orleans, POINT(-89.9288 30.0687) +MUX, Multan, 4, [major,military], POINT(71.418995432932 30.1950780904965), Pakistan, Multan, POINT(71.4697 30.1978) +MVD, Carrasco Int'l, 4, major, POINT(-56.026636146282 -34.8410485988569), Uruguay, Montevideo, POINT(-56.1819 -34.8836) +MZT, General Rafael Buelna Int'l, 4, mid, POINT(-106.270016617885 23.1665960971344), Mexico, Mazatlán, POINT(-106.4167 23.2167) +NAS, Nassau Int'l, 4, major, POINT(-77.4648472290944 25.0486910600866), The Bahamas, Nassau, POINT(-77.3386 25.0781) +NDJ, Ndjamena, 4, mid, POINT(15.0330446385559 12.1295400184115), Chad, N’Djamena, POINT(15.05 12.11) +NIM, Niamey, 4, mid, 
POINT(2.17730671184125 13.4767572807942), Niger, Niamey, POINT(2.1175 13.515) +CEB, Mactan-Cebu Int'l, 4, major, POINT(123.979134508664 10.3158756727292), Philippines, Lapu-Lapu City, POINT(123.9488 10.3127) +NOV, Nova Lisboa, 4, mid, POINT(15.7497618459595 -12.8025414575915), Angola, Huambo, POINT(15.7347 -12.7767) +OMA, Eppley Airfield, 4, mid, POINT(-95.8994157953121 41.2997111453012), United States, Omaha, POINT(-96.0529 41.2627) +OME, Nome, 4, mid, POINT(-165.441641712281 64.5072207026631), , , +OUA, Ouagadougou, 4, mid, POINT(-1.51380536165114 12.3535800260473), Burkina Faso, Ouagadougou, POINT(-1.5275 12.3686) +PAP, Mais Gate Int'l, 4, mid, POINT(-72.2944780260473 18.5756829054286), Haiti, Port-au-Prince, POINT(-72.3333 18.5333) +PBC, Puebla, 4, mid, POINT(-98.375759790423 19.163793546584), Mexico, Puebla, POINT(-98.1833 19.0333) +PDX, Portland Int'l, 4, major, POINT(-122.592738881254 45.5889569315305), United States, Vancouver, POINT(-122.5967 45.6366) +PER, Perth Int'l, 4, major, POINT(115.974224942233 -31.9411297945783), Australia, Kwinana, POINT(115.7702 -32.2394) +PLZ, H F Verwoerd, 4, mid, POINT(25.6117777567602 -33.9840877431374), South Africa, Port Elizabeth, POINT(25.6 -33.9581) +PMC, El Tepual Int'l, 4, mid, POINT(-73.0983841336424 -41.4333820702269), Chile, Puerto Montt, POINT(-72.9333 -41.4667) +PNH, Pochentong, 4, major, POINT(104.845027612457 11.5526449176513), Cambodia, Phnom Penh, POINT(104.9211 11.5694) +PNQ, Pune, 4, [major,military], POINT(73.9089838110016 18.5791766115328), India, Pune, POINT(73.8567 18.5203) +POM, Port Moresby Int'l, 4, major, POINT(147.211250855977 -9.43865269316142), Papua New Guinea, Port Moresby, POINT(147.1494 -9.4789) +PTY, Tocumen Int'l, 4, major, POINT(-79.3871348215438 9.06687242265839), Panama, Tocumen, POINT(-79.38 9.08) +PUQ, Carlos Ibáñez de Campo Int'l, 4, mid, POINT(-70.8431237851324 -53.0050698255177), Chile, Punta Arenas, POINT(-70.9333 -53.1667) +RDU, Durham Int'l, 4, major, POINT(-78.7913814006751 
35.8752323452255), United States, Raleigh, POINT(-78.6429 35.8324) +RGN, Mingaladon, 4, major, POINT(96.1341946114947 16.9011542818251), Myanmar, Rangoon, POINT(96.16 16.795) +RIX, Riga, 4, major, POINT(23.9793791116995 56.9220038786097), Latvia, Jūrmala, POINT(23.7703 56.9681) +SAH, Sanaa Int'l, 4, mid, POINT(44.2246467902561 15.4739027755737), Yemen, Sanaa, POINT(44.2064 15.3483) +SDA, Baghdad Int'l, 4, major, POINT(44.2289125352942 33.268162986377), Iraq, Baghdad, POINT(44.3661 33.3153) +SDQ, De Las Américas Int'l, 4, major, POINT(-69.6764726754667 18.4302196948173), Dominican Republic, Santo Domingo Este, POINT(-69.8734 18.4855) +SGN, Tan Son Nhat, 4, major, POINT(106.664246141375 10.8163005571879), Vietnam, Ho Chi Minh City, POINT(106.7019 10.7756) +SKG, Thessaloniki, 4, major, POINT(22.9764353610613 40.5238736887775), Greece, Thessaloníki, POINT(22.9347 40.6403) +SOF, Vrazhdebna, 4, major, POINT(23.4024521357708 42.6891841273195), Bulgaria, Sofia, POINT(23.33 42.7) +STV, Surat, 4, major, POINT(72.7424384372589 21.1204503297172), India, Sūrat, POINT(72.8311 21.1702) +SUV, Nausori Int'l, 4, mid, POINT(178.560048369959 -18.0458996922854), Fiji, Nausori, POINT(178.5454 -18.0244) +SYZ, Shiraz Int'l, 4, major, POINT(52.5897712745211 29.5458013842874), Iran, Shīrāz, POINT(52.5425 29.61) +TAM, Gen Francisco J Mina Int'l, 4, mid, POINT(-97.8698137568394 22.2893319525064), Mexico, Tampico, POINT(-97.8686 22.2553) +TGU, Toncontin Int'l, 4, mid, POINT(-87.2192116348986 14.0599852192071), Honduras, Tegucigalpa, POINT(-87.2167 14.1) +THR, Mehrabad Int'l, 4, major, POINT(51.3208069717572 35.6913743304946), Iran, Tehran, POINT(51.3889 35.6892) +TIA, Tirane Rinas, 4, major, POINT(19.7150324049722 41.4208514680567), Albania, Tirana, POINT(19.8178 41.3289) +TIJ, General Abelardo L Rodriguez Int'l, 4, major, POINT(-116.975476095598 32.5460499135013), Mexico, Tijuana, POINT(-117.0333 32.525) +TLC, Jose Maria Morelos Y Pavon, 4, mid, POINT(-99.5706494463542 19.3386880423032), 
Mexico, Mexico City, POINT(-99.1333 19.4333) +TLL, Ulemiste, 4, major, POINT(24.798964869983 59.4165014697451), Estonia, Tallinn, POINT(24.7453 59.4372) +TLV, Ben Gurion, 4, major, POINT(34.8708499180995 32.0007468501844), Israel, Tel Aviv-Yafo, POINT(34.78 32.08) +TMS, São Tomé Salazar, 4, mid, POINT(6.71282193005667 0.374744213699427), Sao Tome and Principe, Neves, POINT(6.5517 0.3592) +TNR, Antananarivo Ivato, 4, mid, POINT(47.4753540009579 -18.7993348763082), Madagascar, Antananarivo, POINT(47.5167 -18.9333) +TPA, Tampa Int'l, 4, major, POINT(-82.534824252055 27.9800400852184), United States, Tampa, POINT(-82.4447 27.9945) +VLN, Zim Valencia, 4, mid, POINT(-67.9223617121873 10.1540056883979), Venezuela, Los Guayos, POINT(-67.9333 10.1833) +VOG, Gumrak, 4, mid, POINT(44.354767968489 48.7916764657611), Russia, Volgograd, POINT(44.5147 48.7086) +VTE, Vientiane, 4, mid, POINT(102.568238195728 17.9754595948321), Laos, Vientiane, POINT(102.6 17.9667) +VVI, Viru Viru Int'l, 4, mid, POINT(-63.1403888218213 -17.6479468257839), Bolivia, Warnes, POINT(-63.1667 -17.5167) +WLG, Wellington Int'l, 4, major, POINT(174.811665268238 -41.3289891844659), New Zealand, Lower Hutt, POINT(174.9167 -41.2167) +YPR, Prince Rupert, 4, mid, POINT(-130.445587 54.292), Canada, Prince Rupert, POINT(-130.3271 54.3122) +YQG, Windsor, 4, mid, POINT(-82.9600877389448 42.2658784727198), United States, Detroit, POINT(-83.1024 42.3834) +YQR, Regina, 4, mid, POINT(-104.655433975371 50.4332192867183), Canada, Regina, POINT(-104.6067 50.4547) +YVR, Vancouver Int'l, 4, major, POINT(-123.180867003812 49.1935590395715), Canada, Surrey, POINT(-122.8489 49.19) +YWG, Winnipeg Int'l, 4, major, POINT(-97.2267694809585 49.9033302471671), Canada, Winnipeg, POINT(-97.1464 49.8844) +YXE, John G Diefenbaker Int'l, 4, mid, POINT(-106.690181967554 52.1701439447381), Canada, Saskatoon, POINT(-106.6833 52.1333) +YXY, Whitehorse Int'l, 4, mid, POINT(-135.076210089402 60.7141521481397), Canada, Whitehorse, 
POINT(-135.0691 60.7029) +YYC, Calgary Int'l, 4, major, POINT(-114.010560500236 51.1308572567549), Canada, Calgary, POINT(-114.0667 51.05) +YYG, Charlottetown, 4, mid, POINT(-63.1312341333234 46.2858131367525), Canada, Charlottetown, POINT(-63.1347 46.2403) +YYQ, Churchill, 4, mid, POINT(-94.0813639506318 58.7497237849788), , , +YYT, St John's Int'l, 4, mid, POINT(-52.7433337428638 47.6131179007955), Canada, St. John's, POINT(-52.7971 47.4817) +YZF, Yellowknife, 4, mid, POINT(-114.437846335049 62.4707373610202), Canada, Yellowknife, POINT(-114.4053 62.4709) +ZAG, Zagreb, 4, major, POINT(16.0615138009014 45.7333266730984), Croatia, Velika Gorica, POINT(16.0667 45.7) +ZNZ, Zanzibar, 4, mid, POINT(39.2223319841558 -6.21857034620282), Tanzania, Zanzibar, POINT(39.199 -6.165) +REK, Reykjavik Air Terminal, 4, mid, POINT(-21.9466344031327 64.1318728609901), Iceland, Reykjavík, POINT(-21.94 64.1467) +ARH, Arkhangelsk-Talagi, 4, mid, POINT(40.7133465694594 64.5967437730455), Russia, Arkhangelsk, POINT(40.5333 64.55) +KZN, Kazan Int'l, 4, major, POINT(49.2984458036407 55.6080601429764), Russia, Kazan, POINT(49.1089 55.7964) +ORY, Paris Orly, 4, major, POINT(2.36737912783773 48.7313030458052), France, Vitry-sur-Seine, POINT(2.3928 48.7875) +YQB, Québec, 4, major, POINT(-71.3839280711731 46.7915684363308), Canada, Quebec City, POINT(-71.2081 46.8139) +YUL, Montréal-Trudeau, 4, major, POINT(-73.7493162650417 45.4583512294531), Canada, Montréal, POINT(-73.5617 45.5089) +NRT, Narita Int'l, 4, major, POINT(140.384401709179 35.7640560727828), Japan, Chiba, POINT(140.1064 35.6073) +NGO, Chubu Centrair Int'l, 4, major, POINT(136.814771286824 34.8590296958162), Japan, Nagoya, POINT(136.9 35.1833) +OKD, Okadama, 4, mid, POINT(141.382100450075 43.1106495990978), Japan, Sapporo, POINT(141.35 43.0667) +BGO, Bergen Flesland, 4, major, POINT(5.22725311562336 60.2890610502966), Norway, Askøy, POINT(5.15 60.4667) +TOS, Tromsø Langnes, 4, major, POINT(18.9072624292132 69.6796790473478), 
Norway, Tromsø, POINT(18.9428 69.6828) +BEL, Val de Caes Int'l, 4, mid, POINT(-48.4795602893793 -1.38974628795546), Brazil, Ananindeua, POINT(-48.3719 -1.3658) +CGR, Campo Grande Int'l, 4, mid, POINT(-54.6689498781305 -20.4572717360311), Brazil, Campo Grande, POINT(-54.615 -20.4839) +CWB, Afonso Pena Int'l, 4, mid, POINT(-49.1737093663469 -25.5360001430558), Brazil, Curitiba, POINT(-49.2711 -25.4297) +FOR, Pinto Martins Int'l, 4, mid, POINT(-38.5407472498334 -3.77859496233091), Brazil, Fortaleza, POINT(-38.5275 -3.7275) +GRU, São Paulo-Guarulhos Int'l, 4, major, POINT(-46.481753608842 -23.4261155770421), Brazil, São Paulo, POINT(-46.6333 -23.55) +GYN, Santa Genoveva, 4, mid, POINT(-49.2266464905994 -16.6323665721637), Brazil, Goiânia, POINT(-49.25 -16.6667) +POA, Salgado Filho Int'l, 4, mid, POINT(-51.1770409488172 -29.9901930170609), Brazil, Porto Alegre, POINT(-51.23 -30.0331) +REC, Gilberto Freyre Int'l, 4, mid, POINT(-34.9182667174851 -8.13162553076239), Brazil, Recife, POINT(-34.9 -8.05) +SSA, Deputado Luis Eduardo Magalhaes Int'l, 4, mid, POINT(-38.3347989911732 -12.9143614970326), Brazil, Camaçari, POINT(-38.3239 -12.6978) +MDZ, El Plumerillo, 4, mid, POINT(-68.7984838394473 -32.8278001692719), Argentina, Godoy Cruz, POINT(-68.8333 -32.9167) +MAO, Eduardo Gomes Int'l, 4, mid, POINT(-60.0460645898854 -3.0321390062591), Brazil, Manaus, POINT(-60.0167 -3.1) +NSI, Yaoundé Nsimalen Int'l, 4, mid, POINT(11.5479941396807 3.71484520708126), Cameroon, Yaoundé, POINT(11.5167 3.8667) +PVG, Shanghai Pudong Int'l, 4, major, POINT(121.801518760578 31.1523090295533), China, Shanghai, POINT(121.4667 31.1667) +ADJ, Marka Int'l, 4, mid, POINT(35.9841052362449 31.9741994015442), Jordan, Amman, POINT(35.9328 31.9497) +MLE, Male Int'l, 4, major, POINT(73.5273902836844 4.18870090323372), Maldives, Male, POINT(73.5089 4.1753) +VER, Gen. 
Heriberto Jara Int'l, 4, mid, POINT(-96.1835702143695 19.1424237025017), Mexico, Veracruz, POINT(-96.1533 19.1903) +OXB, Osvaldo Vieira Int'l, 4, mid, POINT(-15.651185561666 11.8889231454855), Guinea-Bissau, Bissau, POINT(-15.5667 11.85) +DVO, Francisco Bangoy Int'l, 4, major, POINT(125.645066609434 7.13053746163073), Philippines, Davao, POINT(125.6 7.0667) +SEZ, Seychelles Int'l, 4, mid, POINT(55.5115519246793 -4.67106914178521), , , +DKR, Léopold Sedar Senghor Int'l, 4, major, POINT(-17.490407907719 14.7456306146748), Senegal, Dakar, POINT(-17.4467 14.6928) +PZU, Port Sudan New Int'l, 4, mid, POINT(37.2387475981025 19.4341052385231), Sudan, Port Sudan, POINT(37.2167 19.6167) +TAS, Tashkent Int'l, 4, major, POINT(69.2666137241129 41.2622338767383), Uzbekistan, Tashkent, POINT(69.2797 41.3111) +CPH, Copenhagen, 3, major, POINT(12.6493508684508 55.6285017221528), Denmark, Copenhagen, POINT(12.5683 55.6761) +BBU, Aeroportul National Bucuresti-Baneasa, 3, mid, POINT(26.0857251587764 44.497041455972), Romania, Bucharest, POINT(26.1039 44.4325) +BUD, Ferihegy, 3, major, POINT(19.2622301677881 47.433274269248), Hungary, Budapest, POINT(19.0514 47.4925) +CKG, Chongqing Jiangbei Int'l, 3, major, POINT(106.638019704811 29.7240422241688), China, Chongqing, POINT(106.5069 29.55) +CLT, Douglas Int'l, 3, major, POINT(-80.9439277342763 35.2204281685597), United States, Gastonia, POINT(-81.1854 35.2494) +DTW, Detroit Metro, 3, major, POINT(-83.3478935065615 42.2257204508004), United States, Detroit, POINT(-83.1024 42.3834) +DUB, Dublin, 3, major, POINT(-6.24388491037139 53.42700828497), Ireland, Finglas, POINT(-6.2181 53.4597) +FAI, Fairbanks Int'l, 3, major, POINT(-147.865721120795 64.8180981117369), United States, Fairbanks, POINT(-147.6533 64.8353) +HAM, Hamburg, 3, major, POINT(10.005647830925 53.6320011640866), Germany, Norderstedt, POINT(10.0103 53.7064) +KUL, Kuala Lumpur Int'l, 3, major, POINT(101.713886325743 2.74751295791811), Malaysia, Kuala Lumpur, POINT(101.6953 
3.1478) +LAS, Mccarran Int'l, 3, major, POINT(-115.151323951283 36.0849602383367), United States, Sunrise Manor, POINT(-115.0487 36.1783) +MCO, Orlando Int'l, 3, major, POINT(-81.3073713307985 28.4311506791138), United States, Orlando, POINT(-81.337 28.4773) +MSP, Minneapolis St. Paul Int'l, 3, major, POINT(-93.2081003718301 44.8820263631968), United States, Minneapolis, POINT(-93.2678 44.9635) +MUC, Franz-Josef-Strauss, 3, major, POINT(11.7880627192437 48.3538373961608), Germany, Munich, POINT(11.575 48.1375) +PHL, Philadelphia Int'l, 3, major, POINT(-75.2429857676998 39.876087236427), United States, Philadelphia, POINT(-75.1339 40.0077) +PHX, Sky Harbor Int'l, 3, major, POINT(-112.01363529773 33.4358607639498), United States, Phoenix, POINT(-112.0892 33.5722) +SLC, Salt Lake City Int'l, 3, major, POINT(-111.981984879993 40.7867290053708), United States, West Valley City, POINT(-112.0123 40.6886) +STL, Lambert St Louis Int'l, 3, major, POINT(-90.3659545350675 38.7427163155204), United States, St. 
Louis, POINT(-90.2451 38.6359) +WAW, Okecie Int'l, 3, major, POINT(20.9727263383587 52.171026749259), Poland, Piaseczno, POINT(21.0167 52.0667) +ZRH, Zurich Int'l, 3, major, POINT(8.56221279534765 47.4523895064915), Switzerland, Zürich, POINT(8.5411 47.3744) +CRL, Gosselies, 3, mid, POINT(4.4543736298165 50.4571296549567), Belgium, Brussels, POINT(4.3525 50.8467) +MUCf, Munich Freight Terminal, 3, major, POINT(11.7694828593654 48.3497964078377), Germany, Munich, POINT(11.575 48.1375) +BCN, Barcelona, 3, major, POINT(2.07800334981292 41.3031552797463), Spain, El Prat de Llobregat, POINT(2.0953 41.3246) +PRG, Ruzyn, 3, major, POINT(14.2674849854076 50.1076511703671), Czechia, Prague, POINT(14.4214 50.0875) +HKG, Hong Kong Int'l, 2, major, POINT(113.935016387376 22.3153328280868), China, Shenzhen, POINT(114.054 22.535) +TPE, Taoyuan, 2, major, POINT(121.231370453323 25.0767411043346), Taiwan, Taipei, POINT(121.5625 25.0375) +AMS, Schiphol, 2, major, POINT(4.76437693232812 52.3089323889822), Netherlands, Hoofddorp, POINT(4.6907 52.3061) +SIN, Singapore Changi, 2, major, POINT(103.986413880993 1.35616083528126), Singapore, Singapore, POINT(103.8 1.3) +LHR, London Heathrow, 2, major, POINT(-0.453156652063309 51.4709958799938), United Kingdom, Hounslow, POINT(-0.375 51.4668) +AKL, Auckland Int'l, 2, major, POINT(174.791719433715 -37.0063551142815), New Zealand, Auckland, POINT(174.74 -36.8406) +ANC, Anchorage Int'l, 2, major, POINT(-149.981725100633 61.1728936745367), United States, Knik-Fairview, POINT(-149.6252 61.5082) +ATL, Hartsfield-Jackson Atlanta Int'l, 2, major, POINT(-84.4253974336047 33.6405290807352), United States, Atlanta, POINT(-84.422 33.7628) +PEK, Beijing Capital, 2, major, POINT(116.588174004661 40.078766336331), China, Beijing, POINT(116.4075 39.904) +BOG, Eldorado Int'l, 2, major, POINT(-74.1433718001028 4.69883276192097), Colombia, Bogotá, POINT(-74.0722 4.7111) +BOM, Chhatrapati Shivaji Int'l, 2, major, POINT(72.8745639500051 19.0951019488402), 
India, Mumbai, POINT(72.8775 19.0761) +BOS, Gen E L Logan Int'l, 2, major, POINT(-71.0164066172958 42.3665658198506), United States, Revere, POINT(-71.004 42.4189) +BWI, Baltimore-Washington Int'l Thurgood Marshall,2, major, POINT(-76.6686428352448 39.1793943583568), United States, Baltimore, POINT(-76.6144 39.3051) +CAI, Cairo Int'l, 2, major, POINT(31.3997430067114 30.1119904385575), Egypt, Giza, POINT(31.2118 29.987) +CAS, Casablanca-Anfa, 2, mid, POINT(-7.66321880771143 33.5627883851079), Morocco, Mediouna, POINT(-7.51 33.45) +CCS, Simón Bolivar Int'l, 2, mid, POINT(-67.0057488076316 10.5973549146064), Venezuela, Catia La Mar, POINT(-67.0333 10.6) +CPT, Cape Town Int'l, 2, major, POINT(18.5976565083138 -33.9704466120395), South Africa, Mitchells Plain, POINT(18.6181 -34.0506) +CTU, Chengdushuang Liu, 2, major, POINT(103.956136481695 30.5810712647464), China, Chengdu, POINT(104.0633 30.66) +DEL, Indira Gandhi Int'l, 2, major, POINT(77.0878362565332 28.5592039760586), India, Najafgarh, POINT(76.9798 28.6092) +DEN, Denver Int'l, 2, major, POINT(-104.673797338542 39.8494613881509), United States, Denver, POINT(-104.8758 39.762) +DFW, Dallas-Ft. 
Worth Int'l, 2, major, POINT(-97.0403710741144 32.9001505594816), United States, Irving, POINT(-96.9702 32.8583) +DMK, Don Muang Int'l, 2, major, POINT(100.602578626505 13.9202766010347), Thailand, Bangkok, POINT(100.4942 13.7525) +DXB, Dubai Int'l, 2, major, POINT(55.3540769172243 25.2525655938182), United Arab Emirates, Dubai, POINT(55.2972 25.2631) +EWR, Newark Int'l, 2, major, POINT(-74.1771472796706 40.6904798278929), United States, New York, POINT(-73.9249 40.6943) +EZE, Ministro Pistarini Int'l, 2, major, POINT(-58.5412456939382 -34.8136469380323), Argentina, José María Ezeiza, POINT(-58.5167 -34.8333) +FLL, Fort Lauderdale Hollywood Int'l, 2, major, POINT(-80.1452588465189 26.0717095746827), United States, Hollywood, POINT(-80.1679 26.0293) +IAH, George Bush Intercontinental, 2, major, POINT(-95.3337047912947 29.9865909034907), United States, Houston, POINT(-95.3885 29.786) +IST, Atatürk Hava Limani, 2, major, POINT(28.8195493087893 40.9778388177797), Turkey, Istanbul, POINT(28.955 41.0136) +JNB, OR Tambo Int'l, 2, major, POINT(28.2319885648741 -26.1320953994887), South Africa, Johannesburg, POINT(28.0456 -26.2044) +JNU, Juneau Int'l, 2, mid, POINT(-134.583573037872 58.3589441045951), United States, Juneau, POINT(-134.1739 58.4546) +LAX, Los Angeles Int'l, 2, major, POINT(-118.402468548522 33.9441742543586), United States, Los Angeles, POINT(-118.4068 34.1141) +LIN, Linate, 2, major, POINT(9.27996629691061 45.4603938456252), Italy, Milan, POINT(9.19 45.4669) +MEL, Melbourne Int'l, 2, major, POINT(144.848998091131 -37.6699411967893), Australia, Melton, POINT(144.5833 -37.6833) +MEX, Lic Benito Juarez Int'l, 2, major, POINT(-99.0826079514239 19.4354695720494), Mexico, Mexico City, POINT(-99.1333 19.4333) +MNL, Ninoy Aquino Int'l, 2, major, POINT(121.004122083437 14.5068323762967), Philippines, Manila, POINT(120.9772 14.5958) +NBO, Jomo Kenyatta Int'l, 2, major, POINT(36.9250887490365 -1.33052964350634), Kenya, Nairobi, POINT(36.8172 -1.2864) +HNL, Honolulu 
Int'l, 2, major, POINT(-157.919783173755 21.332022315024), United States, Honolulu, POINT(-157.846 21.3294) +ORD, Chicago O'Hare Int'l, 2, major, POINT(-87.90513439065 41.9765291023803), United States, Chicago, POINT(-87.6866 41.8375) +RUH, King Khalid Int'l, 2, major, POINT(46.701829023464 24.9590317436512), Saudi Arabia, Riyadh, POINT(46.7167 24.6333) +SCL, Arturo Merino Benitez Int'l, 2, major, POINT(-70.7936860162974 -33.3968336342597), Chile, Quilicura, POINT(-70.7333 -33.3667) +SEA, Tacoma Int'l, 2, major, POINT(-122.302289722924 47.4435819127259), United States, Seattle, POINT(-122.3244 47.6211) +SFO, San Francisco Int'l, 2, major, POINT(-122.383470344449 37.6170250868053), United States, South San Francisco, POINT(-122.4196 37.6538) +SHA, Hongqiao, 2, major, POINT(121.341183788567 31.1872574314078), China, Shanghai, POINT(121.4667 31.1667) +SVO, Sheremtyevo, 2, major, POINT(37.4159690348414 55.966447172512), Russia, Khimki, POINT(37.445 55.8892) +YYZ, Toronto-Pearson Int'l, 2, major, POINT(-79.6114193247449 43.6809595186356), Canada, Toronto, POINT(-79.3733 43.7417) +SYD, Kingsford Smith, 2, major, POINT(151.166067305601 -33.9365832057717), Australia, Sydney, POINT(151.21 -33.8678) +HEL, Helsinki Vantaa, 2, major, POINT(24.9682078665914 60.3187158912982), Finland, Helsinki, POINT(24.9375 60.1708) +CDG, Charles de Gaulle Int'l, 2, major, POINT(2.54186776739457 49.0144200969386), France, Aulnay-sous-Bois, POINT(2.4906 48.9386) +TXL, Berlin-Tegel Int'l, 2, major, POINT(13.2903090925074 52.5544287044101), Germany, Hohen Neuendorf, POINT(13.2833 52.6667) +VIE, Vienna Schwechat Int'l, 2, major, POINT(16.5607679642129 48.1197563052538), Hungary, Sopron, POINT(16.5831 47.6849) +FRA, Frankfurt Int'l, 2, major, POINT(8.57182286907608 50.0506770895207), Germany, Frankfurt, POINT(8.6822 50.1106) +FCO, Leonardo da Vinci Int'l, 2, major, POINT(12.2501008973638 41.7950786307394), Italy, Fiumicino, POINT(12.2333 41.7667) +ITM, Osaka Int'l, 2, major, POINT(135.442475256249 
34.7901980848749), Japan, Ōsaka, POINT(135.5022 34.6939) +GMP, Gimpo Int'l, 2, major, POINT(126.802392860276 37.5573005399508), South Korea, Seoul, POINT(126.99 37.56) +OSL, Oslo Gardermoen, 2, major, POINT(11.0991032762581 60.1935783171386), Norway, Oslo, POINT(10.7389 59.9133) +BSB, Juscelino Kubitschek Int'l, 2, major, POINT(-47.9207885133625 -15.8699985002824), Brazil, Brasília, POINT(-47.8828 -15.7939) +CGH, Congonhas Int'l, 2, major, POINT(-46.6591155302196 -23.62685882701), Brazil, São Paulo, POINT(-46.6333 -23.55) +GIG, Rio de Janeiro-Antonio Carlos Jobim Int'l, 2, major, POINT(-43.2483813790683 -22.8123437125006), Brazil, Rio de Janeiro, POINT(-43.2056 -22.9111) +MAD, Madrid Barajas, 2, major, POINT(-3.56902665458863 40.4681282733923), Spain, Torrejón de Ardoz, POINT(-3.4978 40.4614) +SJU, Luis Muñoz Marin, 2, major, POINT(-66.0042299757548 18.4380770734949), Puerto Rico, Carolina, POINT(-65.9792 18.4054) +ARN, Arlanda, 2, major, POINT(17.9307299016916 59.6511203397372), Sweden, Stockholm, POINT(18.0686 59.3294) +CGK, Soekarno-Hatta Int'l, 2, major, POINT(106.654296151172 -6.1266029559729), Indonesia, Jakarta, POINT(106.8275 -6.175) +ATH, Eleftherios Venizelos Int'l, 2, major, POINT(23.9471160554073 37.9362331299254), Greece, Piraeus, POINT(23.6469 37.943) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox.csv new file mode 100644 index 0000000000000..f8701f386e73b --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox.csv @@ -0,0 +1,249 @@ +id:keyword,name:keyword,shape:geo_shape +FLK,Falkland Is.,"BBOX(-61.148055\, -57.733200\, -51.249455\, -52.343055)" +GUF,French Guiana,"BBOX(-54.603782\, -51.648055\, 5.755418\, 2.113473)" +GUY,Guyana,"BBOX(-61.389727\, -56.470636\, 8.535273\, 1.186873)" +PCN,Pitcairn Is.,"BBOX(-130.105055\, -128.286118\, -24.325836\, -25.082227)" +SGS,South Georgia & the South Sandwich 
Is.,"BBOX(-38.023755\, -26.241391\, -53.989727\, -58.498609)" +SHN,St. Helena,"BBOX(-5.792782\, -5.645282\, -15.903755\, -16.021946)" +SUR,Suriname,"BBOX(-58.071400\, -53.986118\, 6.001809\, 1.836245)" +TTO,Trinidad & Tobago,"BBOX(-61.921600\, -60.520836\, 11.345554\, 10.040345)" +VEN,Venezuela,"BBOX(-73.378064\, -59.803055\, 12.197500\, 0.649164)" +ASM,American Samoa,"BBOX(-170.823227\, -170.561873\, -14.254309\, -14.375555)" +COK,Cook Is.,"BBOX(-165.848345\, -157.703764\, -10.881318\, -21.940836)" +PYF,French Polynesia,"BBOX(-151.497773\, -138.809755\, -8.778191\, -17.870836)" +UMI,Jarvis I.,"BBOX(-160.045164\, -160.009464\, -0.374309\, -0.398055)" +NIU,Niue,"BBOX(-169.952236\, -169.781555\, -18.963336\, -19.145555)" +WSM,Samoa,"BBOX(-172.780027\, -171.429200\, -13.460555\, -14.057500)" +TKL,Tokelau,"BBOX(-171.862718\, -171.843764\, -9.170627\, -9.218891)" +TON,Tonga,"BBOX(-175.360000\, -173.906827\, -18.568055\, -21.268064)" +WLF,Wallis & Futuna,"BBOX(-178.190273\, -176.121936\, -13.214864\, -14.323891)" +ARG,Argentina,"BBOX(-73.582300\, -53.650009\, -21.780518\, -55.051673)" +BOL,Bolivia,"BBOX(-69.656191\, -57.521118\, -9.679191\, -22.901109)" +BRA,Brazil,"BBOX(-74.004591\, -34.792918\, 5.272709\, -33.741118)" +CHL,Chile,"BBOX(-109.446109\, -66.420627\, -17.505282\, -55.902227)" +ECU,Ecuador,"BBOX(-91.663891\, -75.216846\, 1.437782\, -5.000309)" +PRY,Paraguay,"BBOX(-62.643773\, -54.243900\, -19.296809\, -27.584727)" +PER,Peru,"BBOX(-81.355146\, -68.673909\, -0.036873\, -18.348546)" +URY,Uruguay,"BBOX(-58.438609\, -53.098300\, -30.096673\, -34.943818)" +UMI,Baker I.,"BBOX(-176.467655\, -176.455855\, 0.222573\, 0.215282)" +CAN,Canada,"BBOX(-141.002991\, -52.617364\, 83.113873\, 41.675554)" +GTM,Guatemala,"BBOX(-92.246782\, -88.214736\, 17.821109\, 13.745836)" +UMI,Howland I.,"BBOX(-176.643082\, -176.631091\, 0.808609\, 0.790282)" +UMI,Johnston Atoll,"BBOX(-169.538936\, -169.523927\, 16.730273\, 16.724164)" +MEX,Mexico,"BBOX(-118.404164\, -86.738618\, 32.718454\, 
14.550545)" +UMI,Midway Is.,"BBOX(-177.395845\, -177.360545\, 28.221518\, 28.184154)" +BRB,Barbados,"BBOX(-59.659446\, -59.427082\, 13.337082\, 13.050554)" +DMA,Dominica,"BBOX(-61.491391\, -61.250700\, 15.631945\, 15.198054)" +GRD,Grenada,"BBOX(-61.785182\, -61.596391\, 12.237154\, 11.996945)" +GLP,Guadeloupe,"BBOX(-61.796109\, -61.187082\, 16.512918\, 15.870000)" +MTQ,Martinique,"BBOX(-61.231536\, -60.816946\, 14.880136\, 14.402773)" +LCA,St. Lucia,"BBOX(-61.079582\, -60.878064\, 14.109309\, 13.709445)" +SPM,St. Pierre & Miquelon,"BBOX(-56.397782\, -56.145500\, 47.135827\, 46.747191)" +VCT,St. Vincent & the Grenadines,"BBOX(-61.280146\, -61.120282\, 13.383191\, 13.130282)" +ABW,Aruba,"BBOX(-70.059664\, -69.874864\, 12.627773\, 12.411109)" +BMU,Bermuda,"BBOX(-64.823064\, -64.676809\, 32.379509\, 32.260554)" +DOM,Dominican Republic,"BBOX(-72.003064\, -68.322927\, 19.930827\, 17.604164)" +HTI,Haiti,"BBOX(-74.467791\, -71.629182\, 20.091454\, 18.022782)" +JAM,Jamaica,"BBOX(-78.373900\, -76.221118\, 18.522500\, 17.697218)" +ANT,Netherlands Antilles,"BBOX(-69.163618\, -68.192927\, 12.383891\, 12.020554)" +BHS,The Bahamas,"BBOX(-78.978900\, -72.738891\, 26.929164\, 20.915273)" +TCA,Turks & Caicos Is.,"BBOX(-72.031464\, -71.127573\, 21.957773\, 21.429918)" +BLZ,Belize,"BBOX(-89.216400\, -87.779591\, 18.489900\, 15.889854)" +CYM,Cayman Is.,"BBOX(-81.400836\, -81.093064\, 19.354164\, 19.265000)" +COL,Colombia,"BBOX(-81.720146\, -66.870455\, 12.590273\, -4.236873)" +CRI,Costa Rica,"BBOX(-85.911391\, -82.561400\, 11.212845\, 8.025673)" +CUB,Cuba,"BBOX(-84.952927\, -74.131255\, 23.194027\, 19.821945)" +SLV,El Salvador,"BBOX(-90.108064\, -87.694673\, 14.431982\, 13.156391)" +HND,Honduras,"BBOX(-89.350491\, -83.131855\, 16.435827\, 12.985173)" +NIC,Nicaragua,"BBOX(-87.689827\, -83.131855\, 15.022218\, 10.709691)" +PAN,Panama,"BBOX(-83.030291\, -77.198336\, 9.620136\, 7.206109)" +AIA,Anguilla,"BBOX(-63.167782\, -62.972709\, 18.272982\, 18.164445)" +ATG,Antigua & 
Barbuda,"BBOX(-61.891109\, -61.666946\, 17.724300\, 16.989718)" +VGB,British Virgin Is.,"BBOX(-64.698482\, -64.324527\, 18.504854\, 18.383891)" +MSR,Montserrat,"BBOX(-62.236946\, -62.138891\, 16.812354\, 16.671391)" +PRI,Puerto Rico,"BBOX(-67.266400\, -65.301118\, 18.519445\, 17.922218)" +KNA,St. Kitts & Nevis,"BBOX(-62.862782\, -62.622509\, 17.410136\, 17.208882)" +VIR,Virgin Is.,"BBOX(-65.023509\, -64.562573\, 18.387673\, 17.676664)" +FRO,Faroe Is.,"BBOX(-7.433473\, -6.389718\, 62.357500\, 61.388327)" +GRL,Greenland,"BBOX(-73.053609\, -12.157637\, 83.623600\, 59.790273)" +XGK,Guernsey,"BBOX(-2.668609\, -2.500973\, 49.508191\, 49.422491)" +ISL,Iceland,"BBOX(-24.538400\, -13.499446\, 66.536100\, 63.390000)" +IRL,Ireland,"BBOX(-10.474727\, -6.013055\, 55.379991\, 51.445545)" +XIM,Isle of Man,"BBOX(-4.787155\, -4.308682\, 54.416382\, 54.055545)" +SJM,Jan Mayen,"BBOX(-9.119909\, -7.928509\, 71.180818\, 70.803863)" +XJE,Jersey,"BBOX(-2.247364\, -2.015000\, 49.261109\, 49.167773)" +GBR,United Kingdom,"BBOX(-8.171664\, 1.749445\, 60.843327\, 49.955273)" +CPV,Cape Verde,"BBOX(-25.360555\, -22.666109\, 17.192364\, 14.811109)" +CIV,Cote d'Ivoire,"BBOX(-8.606382\, -2.487782\, 10.735254\, 4.344718)" +GHA,Ghana,"BBOX(-3.248891\, 1.202782\, 11.155691\, 4.727082)" +GIB,Gibraltar,"BBOX(-5.356173\, -5.334509\, 36.163309\, 36.112073)" +LBR,Liberia,"BBOX(-11.492327\, -7.368400\, 8.512782\, 4.343609)" +MAR,Morocco,"BBOX(-13.174964\, -1.011809\, 35.919164\, 27.664236)" +PRT,Portugal,"BBOX(-31.289027\, -6.190455\, 42.150673\, 32.637500)" +ESP,Spain,"BBOX(-18.169864\, 4.316945\, 43.764300\, 27.637500)" +ESH,Western Sahara,"BBOX(-17.101527\, -8.666391\, 27.666954\, 20.764100)" +BFA,Burkina Faso,"BBOX(-5.520837\, 2.397927\, 15.082773\, 9.395691)" +GIN,Guinea,"BBOX(-15.080837\, -7.653373\, 12.677500\, 7.193927)" +GNB,Guinea-Bissau,"BBOX(-16.717773\, -13.643891\, 12.684718\, 10.925100)" +MLI,Mali,"BBOX(-12.244837\, 4.251391\, 25.000273\, 10.142154)" +MRT,Mauritania,"BBOX(-17.075555\, 
-4.806109\, 27.290454\, 14.725636)" +SEN,Senegal,"BBOX(-17.532782\, -11.369927\, 16.690618\, 12.301745)" +SLE,Sierra Leone,"BBOX(-13.295609\, -10.264309\, 9.997500\, 6.923609)" +GMB,The Gambia,"BBOX(-16.821664\, -13.798609\, 13.826391\, 13.059973)" +DJI,Djibouti,"BBOX(41.759854\, 43.420409\, 12.708327\, 10.942218)" +ERI,Eritrea,"BBOX(36.443282\, 43.121382\, 17.994882\, 12.363891)" +ETH,Ethiopia,"BBOX(32.991800\, 47.988245\, 14.883609\, 3.406664)" +MNG,Mongolia,"BBOX(87.761100\, 119.931509\, 52.142773\, 41.586654)" +SDN,Sudan,"BBOX(21.829100\, 38.607500\, 22.232218\, 3.493391)" +UGA,Uganda,"BBOX(29.574300\, 35.009718\, 4.222782\, -1.476109)" +ISR,Gaza Strip,"BBOX(34.216663\, 34.558891\, 31.596100\, 31.216545)" +IRQ,Iraq,"BBOX(38.794700\, 48.560691\, 37.383673\, 29.061664)" +ISR,Israel,"BBOX(34.267582\, 35.681109\, 33.270273\, 29.486709)" +JOR,Jordan,"BBOX(34.960418\, 39.301109\, 33.377591\, 29.188891)" +KAZ,Kazakhstan,"BBOX(46.499163\, 87.348209\, 55.442627\, 40.594436)" +NOR,Norway,"BBOX(4.789582\, 31.073536\, 71.154709\, 57.987918)" +RUS,Russia,"BBOX(-180.000000\, 180.000000\, 81.851927\, 41.196582)" +SWE,Sweden,"BBOX(11.113336\, 24.167009\, 69.060300\, 55.339164)" +ISR,West Bank,"BBOX(34.888191\, 35.570609\, 32.546391\, 31.350691)" +DZA,Algeria,"BBOX(-8.667218\, 11.986473\, 37.089854\, 18.976391)" +AND,Andorra,"BBOX(1.421391\, 1.781718\, 42.655964\, 42.436382)" +CMR,Cameroon,"BBOX(8.502363\, 16.207000\, 13.085000\, 1.654164)" +CAF,Central African Republic,"BBOX(14.418891\, 27.459718\, 11.000836\, 2.221264)" +LBY,Libya,"BBOX(9.311391\, 25.151663\, 33.171136\, 19.499064)" +MCO,Monaco,"BBOX(7.390900\, 7.439291\, 43.768300\, 43.727545)" +TUN,Tunisia,"BBOX(7.492218\, 11.581663\, 37.340409\, 30.234391)" +BEN,Benin,"BBOX(0.776663\, 3.855000\, 12.396654\, 6.218718)" +TCD,Chad,"BBOX(13.461945\, 24.002745\, 23.450554\, 7.458536)" +GNQ,Equatorial Guinea,"BBOX(8.424163\, 11.353891\, 3.763336\, 0.930154)" +KIR,Kiribati,"BBOX(-157.581700\, 172.947509\, 2.033054\, 1.335991)" 
+NER,Niger,"BBOX(0.166663\, 15.996663\, 23.522309\, 11.693273)" +NGA,Nigeria,"BBOX(2.692500\, 14.649654\, 13.891500\, 4.272845)" +STP,Sao Tome & Principe,"BBOX(6.465136\, 7.463473\, 1.701245\, 0.018336)" +TGO,Togo,"BBOX(-0.149764\, 1.797800\, 11.138536\, 6.100545)" +ALB,Albania,"BBOX(19.288536\, 21.053327\, 42.660345\, 39.645000)" +BIH,Bosnia & Herzegovina,"BBOX(15.740591\, 19.619782\, 45.265945\, 42.565827)" +HRV,Croatia,"BBOX(13.504791\, 19.425000\, 46.535827\, 42.399991)" +ITA,Italy,"BBOX(6.623963\, 18.514445\, 47.094582\, 36.649164)" +MKD,Macedonia,"BBOX(20.458818\, 23.030973\, 42.358954\, 40.855891)" +MLT,Malta,"BBOX(14.329100\, 14.570000\, 35.991936\, 35.800000)" +SMR,San Marino,"BBOX(12.406945\, 12.511109\, 43.986873\, 43.898682)" +SMN,Serbia & Montenegro,"BBOX(18.453327\, 23.005000\, 46.181109\, 41.849000)" +VTC,Vatican City,"BBOX(12.444473\, 12.457718\, 41.908391\, 41.900891)" +BGR,Bulgaria,"BBOX(22.365273\, 28.605136\, 44.224718\, 41.243045)" +CYP,Cyprus,"BBOX(32.269863\, 34.586036\, 35.688609\, 34.640273)" +EGY,Egypt,"BBOX(24.706800\, 36.895827\, 31.646945\, 21.994164)" +GEO,Georgia,"BBOX(40.002963\, 46.710818\, 43.584718\, 41.048045)" +GRC,Greece,"BBOX(19.640000\, 28.238045\, 41.747773\, 34.930545)" +LBN,Lebanon,"BBOX(35.100827\, 36.623745\, 34.647500\, 33.062082)" +SYR,Syria,"BBOX(35.614463\, 42.378327\, 37.290545\, 32.313609)" +TUR,Turkey,"BBOX(25.665827\, 44.820545\, 42.109991\, 35.818445)" +AUT,Austria,"BBOX(9.533573\, 17.166382\, 49.018745\, 46.407491)" +CZE,Czech Republic,"BBOX(12.093700\, 18.852218\, 51.052491\, 48.581382)" +DNK,Denmark,"BBOX(8.092918\, 15.149163\, 57.745973\, 54.561936)" +HUN,Hungary,"BBOX(16.111800\, 22.894800\, 48.576173\, 45.748327)" +POL,Poland,"BBOX(14.147636\, 24.143473\, 54.836036\, 49.002918)" +SVK,Slovakia,"BBOX(16.844718\, 22.558054\, 49.600827\, 47.737500)" +SVN,Slovenia,"BBOX(13.383473\, 16.607873\, 46.876245\, 45.425818)" +SJM,Svalbard,"BBOX(10.487918\, 33.637500\, 80.764163\, 74.343045)" 
+BEL,Belgium,"BBOX(2.541663\, 6.398200\, 51.501245\, 49.508882)" +FRA,France,"BBOX(-4.790282\, 9.562218\, 51.091109\, 41.364927)" +DEU,Germany,"BBOX(5.865000\, 15.033818\, 55.056527\, 47.274718)" +LIE,Liechtenstein,"BBOX(9.474636\, 9.633891\, 47.274545\, 47.057454)" +LUX,Luxembourg,"BBOX(5.734445\, 6.524027\, 50.181809\, 49.448464)" +NLD,Netherlands,"BBOX(3.370863\, 7.210973\, 53.465827\, 50.753882)" +CHE,Switzerland,"BBOX(5.967009\, 10.488209\, 47.806664\, 45.829436)" +USA,United States,"BBOX(-178.216555\, 179.775936\, 71.351436\, 18.925482)" +BLR,Belarus,"BBOX(23.165400\, 32.740054\, 56.167491\, 51.251845)" +EST,Estonia,"BBOX(21.837354\, 28.194091\, 59.664718\, 57.522636)" +FIN,Finland,"BBOX(19.511391\, 31.581963\, 70.088609\, 59.806800)" +LVA,Latvia,"BBOX(20.968609\, 28.235963\, 58.083254\, 55.674836)" +LTU,Lithuania,"BBOX(20.942836\, 26.813054\, 56.449854\, 53.890336)" +MDA,Moldova,"BBOX(26.634991\, 30.128709\, 48.468318\, 45.448645)" +ROM,Romania,"BBOX(20.261027\, 29.672218\, 48.263882\, 43.623309)" +UKR,Ukraine,"BBOX(22.151445\, 40.178745\, 52.378600\, 44.379154)" +IND,India,"BBOX(68.144227\, 97.380536\, 35.505618\, 6.745827)" +MDV,Maldives,"BBOX(72.863391\, 73.637272\, 7.027773\, -0.641664)" +OMN,Oman,"BBOX(51.999291\, 59.847082\, 26.368709\, 16.642782)" +SOM,Somalia,"BBOX(40.988609\, 51.411318\, 11.979164\, -1.674873)" +LKA,Sri Lanka,"BBOX(79.696091\, 81.891663\, 9.828191\, 5.918054)" +TKM,Turkmenistan,"BBOX(51.250182\, 66.670882\, 42.796173\, 35.145991)" +UZB,Uzbekistan,"BBOX(55.997491\, 73.167545\, 45.570591\, 37.184991)" +YEM,Yemen,"BBOX(42.555973\, 54.473473\, 18.999345\, 12.144718)" +ARM,Armenia,"BBOX(43.454163\, 46.620536\, 41.297054\, 38.841145)" +AZE,Azerbaijan,"BBOX(44.778863\, 51.677009\, 42.710754\, 38.262809)" +BHR,Bahrain,"BBOX(50.453327\, 50.796391\, 26.288891\, 25.571945)" +IRN,Iran,"BBOX(44.034954\, 63.330273\, 39.779154\, 25.075973)" +KWT,Kuwait,"BBOX(46.546945\, 48.416591\, 30.084164\, 28.538882)" +QAT,Qatar,"BBOX(50.751936\, 51.615827\, 
26.152500\, 24.556045)" +SAU,Saudi Arabia,"BBOX(34.572145\, 55.666109\, 32.154945\, 16.377500)" +ARE,United Arab Emirates,"BBOX(51.583327\, 56.381663\, 26.083882\, 22.633327)" +AFG,Afghanistan,"BBOX(60.504163\, 74.915736\, 38.471982\, 29.406109)" +KGZ,Kyrgyzstan,"BBOX(69.249500\, 80.281582\, 43.216900\, 39.195473)" +NPL,Nepal,"BBOX(80.052200\, 88.194554\, 30.424718\, 26.368364)" +PAK,Pakistan,"BBOX(60.866300\, 77.823927\, 37.060791\, 23.688045)" +TJK,Tajikistan,"BBOX(67.364700\, 75.187482\, 41.049254\, 36.671845)" +BGD,Bangladesh,"BBOX(88.043872\, 92.669345\, 26.626136\, 20.744818)" +BTN,Bhutan,"BBOX(88.751936\, 92.114218\, 28.325000\, 26.703609)" +BRN,Brunei,"BBOX(114.095082\, 115.360263\, 5.053054\, 4.018191)" +CHN,China,"BBOX(73.620045\, 134.768463\, 53.553745\, 18.168882)" +JPN,Japan,"BBOX(123.678863\, 145.812409\, 45.486382\, 24.251391)" +PRK,North Korea,"BBOX(124.323954\, 130.697418\, 43.006100\, 37.671382)" +PLW,Palau,"BBOX(134.452482\, 134.658872\, 7.729445\, 7.305254)" +PHL,Philippines,"BBOX(116.950000\, 126.598036\, 19.391109\, 5.049164)" +KOR,South Korea,"BBOX(126.099018\, 129.586872\, 38.625245\, 33.192209)" +KHM,Cambodia,"BBOX(102.346509\, 107.636382\, 14.708618\, 10.422736)" +LAO,Laos,"BBOX(100.091372\, 107.695254\, 22.499927\, 13.926664)" +MYS,Malaysia,"BBOX(99.641936\, 119.275818\, 7.352918\, 0.852782)" +MMR,Myanmar,"BBOX(92.204991\, 101.169427\, 28.546527\, 9.839582)" +SGP,Singapore,"BBOX(103.640945\, 103.997945\, 1.445282\, 1.259027)" +THA,Thailand,"BBOX(97.347272\, 105.639291\, 20.454582\, 5.633473)" +VNM,Vietnam,"BBOX(102.140745\, 109.464845\, 23.324164\, 8.559236)" +GUM,Guam,"BBOX(144.634154\, 144.953309\, 13.652291\, 13.235000)" +MHL,Marshall Is.,"BBOX(162.324963\, 171.378063\, 14.594027\, 5.600273)" +FSM,Micronesia,"BBOX(158.120100\, 163.042891\, 6.977636\, 5.261664)" +MNP,Northern Mariana Is.,"BBOX(145.572682\, 145.818082\, 15.268191\, 14.908054)" +UMI,Wake I.,"BBOX(166.608981\, 166.662200\, 19.324582\, 19.279445)" 
+BWA,Botswana,"BBOX(19.996109\, 29.373618\, -17.782082\, -26.875555)" +BDI,Burundi,"BBOX(28.985000\, 30.853191\, -2.301564\, -4.448055)" +ATF,French Southern & Antarctic Lands,"BBOX(51.650836\, 70.567491\, -46.327645\, -49.725009)" +HMD,Heard I. & McDonald Is.,"BBOX(73.234709\, 73.773882\, -52.965145\, -53.199445)" +KEN,Kenya,"BBOX(33.907218\, 41.905163\, 4.622500\, -4.669618)" +RWA,Rwanda,"BBOX(28.854445\, 30.893263\, -1.054446\, -2.825491)" +TZA,Tanzania,"BBOX(29.340827\, 40.436809\, -0.997218\, -11.740418)" +ZMB,Zambia,"BBOX(21.996391\, 33.702282\, -8.191664\, -18.074918)" +ZWE,Zimbabwe,"BBOX(25.237918\, 33.071591\, -15.616527\, -22.414764)" +ATA,Antarctica,"BBOX(-180.000000\, 180.000000\, -60.503336\, -90.000000)" +NOR,Bouvet I.,"BBOX(3.342363\, 3.484163\, -54.383609\, -54.462782)" +COM,Comoros,"BBOX(43.214027\, 44.530418\, -11.366946\, -12.383055)" +REU,Juan De Nova I.,"BBOX(42.723818\, 42.760900\, -17.052018\, -17.076118)" +LSO,Lesotho,"BBOX(27.013973\, 29.455554\, -28.570691\, -30.650527)" +MWI,Malawi,"BBOX(32.681873\, 35.920963\, -9.376673\, -17.135282)" +MOZ,Mozambique,"BBOX(30.213018\, 40.846109\, -10.471109\, -26.860282)" +ZAF,South Africa,"BBOX(16.483327\, 37.892218\, -22.136391\, -46.969727)" +SWZ,Swaziland,"BBOX(30.798336\, 32.133400\, -25.728336\, -27.316391)" +AGO,Angola,"BBOX(11.731245\, 24.084445\, -4.388991\, -18.016391)" +COG,Congo,"BBOX(11.140663\, 18.643609\, 3.711109\, -5.015000)" +ZAR,Congo\, DRC,"BBOX(12.214554\, 31.302773\, 5.380691\, -13.458055)" +FJI,Fiji,"BBOX(-180.000000\, 180.000000\, -16.153473\, -19.162782)" +GAB,Gabon,"BBOX(8.700836\, 14.519582\, 2.317900\, -3.925282)" +NAM,Namibia,"BBOX(11.716391\, 25.264427\, -16.954173\, -28.961873)" +NZL,New Zealand,"BBOX(-176.848755\, 178.841063\, -34.414718\, -52.578055)" +IOT,British Indian Ocean Territory,"BBOX(72.357900\, 72.494282\, -7.233473\, -7.436246)" +REU,Glorioso Is.,"BBOX(47.279091\, 47.303054\, -11.554100\, -11.577782)" +MDG,Madagascar,"BBOX(43.236827\, 50.501391\, -11.945555\, 
-25.588336)" +MUS,Mauritius,"BBOX(57.306309\, 63.495754\, -19.673336\, -20.520555)" +MYT,Mayotte,"BBOX(45.039163\, 45.293345\, -12.662500\, -12.992500)" +REU,Reunion,"BBOX(55.220554\, 55.853054\, -20.856527\, -21.373891)" +SYC,Seychelles,"BBOX(46.205691\, 55.540554\, -4.551664\, -9.463055)" +CXR,Christmas I.,"BBOX(105.629000\, 105.751900\, -10.384082\, -10.510973)" +CCK,Cocos Is.,"BBOX(96.817491\, 96.864845\, -12.130418\, -12.199446)" +IDN,Indonesia,"BBOX(95.210945\, 141.007018\, 5.913473\, -10.929655)" +TLS,Timor Leste,"BBOX(124.046100\, 127.308591\, -8.140000\, -9.463627)" +AUS,Australia,"BBOX(112.907209\, 158.960372\, -10.135691\, -54.753891)" +NRU,Nauru,"BBOX(166.904418\, 166.957045\, -0.493336\, -0.552218)" +NCL,New Caledonia,"BBOX(163.982745\, 168.130509\, -20.087918\, -22.673891)" +NFK,Norfolk I.,"BBOX(167.910945\, 167.998872\, -29.000555\, -29.081109)" +PNG,Papua New Guinea,"BBOX(140.858854\, 155.966845\, -1.355282\, -11.642500)" +SLB,Solomon Is.,"BBOX(155.671300\, 166.931836\, -6.605518\, -11.845836)" +TUV,Tuvalu,"BBOX(176.295254\, 179.232281\, -6.089446\, -8.561291)" +VUT,Vanuatu,"BBOX(166.521636\, 169.893863\, -13.707218\, -20.254173)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox_web.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox_web.csv new file mode 100644 index 0000000000000..aa540d40ad604 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox_web.csv @@ -0,0 +1,249 @@ +id:keyword,name:keyword,shape:cartesian_shape +FLK, Falkland Is., "BBOX(-6806970.344651548\, -6426830.424971599\, -6665538.61144021\, -6862393.473674134)" +GUF, French Guiana, "BBOX(-6078465.2067807885\, -5749435.182262659\, 641770.3972926841\, 235324.11002137093)" +GUY, Guyana, "BBOX(-6833873.148626795\, -6286282.4434172455\, 953676.0708782381\, 132131.5480264357)" +PCN, Pitcairn Is., "BBOX(-1.448322847021477E7\, -1.428074532961791E7\, -2793163.209148463\, -2885847.742584221)" +SGS, 
South Georgia & the South Sandwich Is., "BBOX(-4232785.044058981\, -2921178.2834205604\, -7168210.949791082\, -8072797.261021951)" +SHN, St. Helena, "BBOX(-644849.5424266771\, -628429.9175369549\, -1793579.7338931332\, -1807264.3754193506)" +SUR, Suriname, "BBOX(-6464478.676752644\, -6009707.164829022\, 669343.5434865113\, 204444.85915446977)" +TTO, Trinidad & Tobago, "BBOX(-6893080.980145244\, -6737148.644965401\, 1271316.8958092\, 1123450.7268402777)" +VEN, Venezuela, "BBOX(-8168408.718739186\, -6657245.629555437\, 1368193.4618250781\, 72266.15206230174)" +ASM, American Samoa, "BBOX(-1.901595464265674E7\, -1.8986860848464E7\, -1603409.0061145446\, -1617338.5456514952)" +COK, Cook Is., "BBOX(-1.8462153311737206E7\, -1.755550270221884E7\, -1218650.60324631\, -2504423.3700605934)" +PYF, French Polynesia, "BBOX(-1.686465494432737E7\, -1.5452231241588091E7\, -981029.2116948966\, -2022435.6471389162)" +UMI, Jarvis I., "BBOX(-1.781614615792593E7\, -1.7812172052105166E7\, -41668.183668037316\, -44311.636360225275)" +NIU, Niue, "BBOX(-1.8918996368064713E7\, -1.889999624605927E7\, -2150619.798091522\, -2172080.175292089)" +WSM, Samoa, "BBOX(-1.9233784622210693E7\, -1.9083411248441823E7\, -1512399.049561015\, -1580814.199108954)" +TKL, Tokelau, "BBOX(-1.913167025144482E7\, -1.912956030181662E7\, -1025256.50252298\, -1030699.159919998)" +TON, Tonga, "BBOX(-1.9520985902791113E7\, -1.9359219424419094E7\, -2104146.80131666\, -2423871.209298853)" +WLF, Wallis & Futuna, "BBOX(-1.98360504519132E7\, -1.9605804230316367E7\, -1484290.6690231054\, -1611402.1249494848)" +ARG, Argentina, "BBOX(-8191144.166257678\, -5972291.682103194\, -2485194.106818803\, -7371901.253043402)" +BOL, Bolivia, "BBOX(-7754091.711639628\, -6403221.564728467\, -1082644.4605265881\, -2620063.8163838163)" +BRA, Brazil, "BBOX(-8238153.385337716\, -3873129.9144329783\, 587785.5079629741\, -3994093.243498929)" +CHL, Chile, "BBOX(-1.2183485121489162E7\, -7393910.374780716\, -1979723.0325789037\, 
-7538976.386388264)" +ECU, Ecuador, "BBOX(-1.0203977668829728E7\, -8373100.994630531\, 160069.96058917182\, -557339.7863215066)" +PRY, Paraguay, "BBOX(-6973472.910758704\, -6038403.325800699\, -2189911.7242244524\, -3196717.5348766074)" +PER, Peru, "BBOX(-9056413.424871765\, -7644744.579599449\, -4104.683866786337\, -2078385.864447083)" +URY, Uruguay, "BBOX(-6505356.195641661\, -5910875.717165678\, -3515982.318158614\, -4156248.8527274607)" +UMI, Baker I., "BBOX(-1.964428949334857E7\, -1.9642975923357394E7\, 24776.775336047573\, 23965.139003268785)" +CAN, Canada, "BBOX(-1.5696381156263582E7\, -5857338.166548977\, 1.7926778413967136E7\, 5112502.227274475)" +GTM, Guatemala, "BBOX(-1.0268864798128676E7\, -9820019.490616102\, 2016620.2477192462\, 1545072.9951440636)" +UMI, Howland I., "BBOX(-1.966381793765724E7\, -1.9662483105643325E7\, 90016.93033465231\, 87976.57940884378)" +UMI, Johnston Atoll, "BBOX(-1.8872988022526257E7\, -1.8871317228289172E7\, 1889449.6904405674\, 1888739.592498257)" +MEX, Mexico, "BBOX(-1.3180691242448486E7\, -9655698.786528189\, 3857992.7910224693\, 1637455.8925958527)" +UMI, Midway Is., "BBOX(-1.9747615131493594E7\, -1.974368555346914E7\, 3276930.956339718\, 3272211.297114333)" +BRB, Barbados, "BBOX(-6641259.148804331\, -6615392.506649243\, 1498269.4980028346\, 1465508.5364990495)" +DMA, Dominica, "BBOX(-6845190.333337227\, -6818396.733782433\, 1762138.8493679555\, 1712035.77580254)" +GRD, Grenada, "BBOX(-6877894.997852321\, -6856878.879868893\, 1372710.0161931934\, 1345360.731534649)" +GLP, Guadeloupe, "BBOX(-6879111.38592805\, -6811314.810418132\, 1864198.7087877272\, 1789672.9198651556)" +MTQ, Martinique, "BBOX(-6816263.407061167\, -6770111.459379609\, 1675390.1030315096\, 1620466.564996925)" +LCA, St. Lucia, "BBOX(-6799347.965159521\, -6776915.084016965\, 1586760.2747788534\, 1540902.846138527)" +SPM, St. Pierre & Miquelon, "BBOX(-6278172.373236121\, -6250088.469463722\, 5964272.744483719\, 5900906.394026551)" +VCT, St. 
Vincent & the Grenadines, "BBOX(-6821674.647507875\, -6803878.668434177\, 1503545.1028787405\, 1474620.605161206)" +ABW, Aruba, "BBOX(-7799006.120542209\, -7778434.278646477\, 1417237.7724451458\, 1392531.3743975367)" +BMU, Bermuda, "BBOX(-7216070.475135298\, -7199789.443011595\, 3813230.825275473\, 3797561.1925476543)" +DOM, Dominican Republic, "BBOX(-8015344.418919742\, -7605673.442087284\, 2264838.2331280783\, 1991268.1942175906)" +HTI, Haiti, "BBOX(-8289716.573465983\, -7973724.065068766\, 2283868.061303094\, 2040215.3097965734)" +JAM, Jamaica, "BBOX(-8724542.638268478\, -8484896.042272912\, 2098797.886578782\, 2002138.6713165536)" +ANT, Netherlands Antilles, "BBOX(-7699258.7361087445\, -7591201.908286172\, 1389429.1415046235\, 1348047.674912462)" +BHS, The Bahamas, "BBOX(-8791890.930189032\, -8097256.305860282\, 3114624.5106054945\, 2381778.6607825435)" +TCA, Turks & Caicos Is., "BBOX(-8018505.892457832\, -7917885.206619215\, 2506456.133236025\, 2443216.1674464582)" +BLZ, Belize, "BBOX(-9931524.217026532\, -9771579.370801603\, 2094970.9791089285\, 1791970.7485571986)" +CYM, Cayman Is., "BBOX(-9061499.6124054\, -9027238.590089742\, 2196677.690165189\, 2186160.351965059)" +COL, Colombia, "BBOX(-9097045.039005652\, -7443984.998678304\, 1412960.1248500098\, -472076.97756910085)" +CRI, Costa Rica, "BBOX(-9563612.298130559\, -9190693.005900422\, 1256252.842749445\, 896349.8334170822)" +CUB, Cuba, "BBOX(-9456916.57372173\, -8252253.557317591\, 2655499.846135876\, 2251949.753820664)" +SLV, El Salvador, "BBOX(-1.0030783799451409E7\, -9762126.342283737\, 1623823.8238794443\, 1477605.2302434247)" +HND, Honduras, "BBOX(-9946451.158864416\, -9254195.76601206\, 1855249.5859095547\, 1458038.3723417278)" +NIC, Nicaragua, "BBOX(-9761586.888031427\, -9254195.76601206\, 1691760.81737009\, 1199200.9443015517)" +PAN, Panama, "BBOX(-9242889.713250706\, -8593679.45241179\, 1075976.1383535631\, 804303.6245583462)" +AIA, Anguilla, "BBOX(-7031805.325801677\, -7010089.898777183\, 
2069525.485454939\, 2056805.549131826)" +ATG, Antigua & Barbuda, "BBOX(-6889686.737551939\, -6864733.02654072\, 2005303.4210994085\, 1919628.1877410556)" +VGB, British Virgin Is., "BBOX(-7202202.070335221\, -7160573.590161418\, 2096726.335695059\, 2082531.6290789556)" +MSR, Montserrat, "BBOX(-6928185.136284053\, -6917269.703615838\, 1898992.8327456792\, 1882606.3105989075)" +PRI, Puerto Rico, "BBOX(-7488061.394454311\, -7269287.202979579\, 2098439.2297828426\, 2028446.302847273)" +KNA, St. Kitts & Nevis, "BBOX(-6997852.881114455\, -6971105.813106805\, 1968620.0064461157\, 1945153.7466145495)" +VIR, Virgin Is., "BBOX(-7238383.9104642505\, -7187072.749663104\, 2082975.2861753216\, 1999737.0895242055)" +FRO, Faroe Is., "BBOX(-827490.42907036\, -711300.1539736006\, 8944413.838654397\, 8715539.142798016)" +GRL, Greenland, "BBOX(-8132290.553358883\, -1353381.9599010698\, 1.841838614386466E7\, 8353191.775986784)" +XGK, Guernsey, "BBOX(-297068.19496499473\, -278407.0408089712\, 6361534.846607885\, 6346855.715083607)" +ISL, Iceland, "BBOX(-2731602.192501422\, -1502751.454502109\, 1.0025136653899286E7\, 9196525.03584683)" +IRL, Ireland, "BBOX(-1166041.2756762397\, -669370.2206187705\, 7435966.643781227\, 6700487.126114637)" +XIM, Isle of Man, "BBOX(-532903.6568742928\, -479640.2861633771\, 7249411.799394163\, 7180682.877256964)" +SJM, Jan Mayen, "BBOX(-1015223.6258196725\, -882597.5845070281\, 1.1464383304063711E7\, 1.1335539300648466E7)" +XJE, Jersey, "BBOX(-250175.41607230977\, -224308.77391722222\, 6319282.822387621\, 6303377.056271344)" +GBR, United Kingdom, "BBOX(-909665.4752870986\, 194747.32654372943\, 8589937.148187652\, 6438533.511709376)" +CPV, Cape Verde, "BBOX(-2823124.068441826\, -2523179.7117936057\, 1943228.8819694468\, 1667440.6983404886)" +CIV, Cote d'Ivoire, "BBOX(-958058.0616790326\, -276938.62540612154\, 1202097.1729137793\, 484115.97315150854)" +GHA, Ghana, "BBOX(-361664.8917125052\, 133893.0797566771\, 1249767.3181259448\, 526814.3511759888)" +GIB, 
Gibraltar, "BBOX(-596246.4508776823\, -593834.8254294725\, 4323115.767768943\, 4316053.421468498)" +LBR, Liberia, "BBOX(-1279319.9894917065\, -820246.5358469777\, 951144.4190395237\, 483992.16413836647)" +MAR, Morocco, "BBOX(-1466630.283495554\, -112634.06264437255\, 4289504.155676036\, 3206707.2043454945)" +PRT, Portugal, "BBOX(-3483078.5525721395\, -689118.2982827483\, 5183576.317394064\, 3847286.4078652565)" +ESP, Spain, "BBOX(-2022660.0079814764\, 480560.1191156738\, 5429039.221465501\, 3203347.2301618545)" +ESH, Western Sahara, "BBOX(-1903733.2771624175\, -964738.2330011163\, 3207048.827624554\, 2363772.158427126)" +BFA, Burkina Faso, "BBOX(-614576.7635071143\, 266936.0125622843\, 1698741.2811715933\, 1050643.0120585556)" +GIN, Guinea, "BBOX(-1678791.0953426699\, -851969.5850923934\, 1422911.1290510038\, 802936.7522689679)" +GNB, Guinea-Bissau, "BBOX(-1861013.9772984823\, -1518830.9983475052\, 1423734.7230846898\, 1223613.9918118552)" +MLI, Mali, "BBOX(-1363089.019496892\, 473262.6812172274\, 2875778.1558879707\, 1134962.1365298633)" +MRT, Mauritania, "BBOX(-1900842.0873479373\, -535013.6065024948\, 3159807.24053085\, 1657600.8186799039)" +SEN, Senegal, "BBOX(-1951740.3641577882\, -1265694.4838205066\, 1884840.6777415504\, 1380068.3247828495)" +SLE, Sierra Leone, "BBOX(-1480060.423460439\, -1142617.6510657615\, 1118607.3838558097\, 772615.2434245716)" +GMB, The Gambia, "BBOX(-1872579.0705148762\, -1536054.1273216614\, 1554306.33090056\, 1466584.8753009895)" +DJI, Djibouti, "BBOX(4648685.682234346\, 4833537.819242839\, 1426428.7393574219\, 1225554.7892715929)" +ERI, Eritrea, "BBOX(4056847.594510955\, 4800250.285874032\, 2036949.5002702742\, 1387149.8027029647)" +ETH, Ethiopia, "BBOX(3672630.3758422886\, 5342026.99671924\, 1675790.1336981696\, 379451.74027328007)" +MNG, Mongolia, "BBOX(9769520.962097632\, 1.3350714510090472E7\, 6825981.925445475\, 5099261.916823782)" +SDN, Sudan, "BBOX(2430004.2961371886\, 4297767.240203056\, 2539428.7064047027\, 
389123.6754710965)" +UGA, Uganda, "BBOX(3292196.0161092333\, 3897263.9800336002\, 470504.09041435266\, -164337.88255462408)" +ISR, Gaza Strip, "BBOX(3808981.5012748297\, 3847078.1479647276\, 3710408.4677697835\, 3660903.6805555364)" +IRQ, Iraq, "BBOX(4318606.2488766555\, 5405751.393937016\, 4492721.642260634\, 3383496.8234396563)" +ISR, Israel, "BBOX(3814649.7784257433\, 3972002.8842663835\, 3931233.3769460395\, 3437740.2376509146)" +JOR, Jordan, "BBOX(3891775.929138256\, 4374979.440881939\, 3945530.7721081185\, 3399709.663800458)" +KAZ, Kazakhstan, "BBOX(5176263.146752886\, 9723558.146230904\, 7448249.257062752\, 4952703.862043582)" +NOR, Norway, "BBOX(533173.8292784104\, 3459090.2041849457\, 1.1455379410923388E7\, 7964779.911100031)" +RUS, Russia, "BBOX(-2.003750834E7\, 2.003750834E7\, 1.6850434409817755E7\, 5041380.846897432)" +SWE, Sweden, "BBOX(1237130.9043623458\, 2690259.1355019724\, 1.0769543191624273E7\, 7427971.135671626)" +ISR, West Bank, "BBOX(3883735.6562778493\, 3959702.080535439\, 3835248.5789866336\, 3678377.284759022)" +DZA, Algeria, "BBOX(-964830.2942199894\, 1334328.0705815821\, 4451638.686907341\, 2152156.534692522)" +AND, Andorra, "BBOX(158228.52231611632\, 198339.94046960064\, 5259751.808527718\, 5226573.156424563)" +CMR, Cameroon, "BBOX(946478.719567819\, 1804154.9870354445\, 1469444.988943757\, 184166.28005485257)" +CAF, Central African Republic, "BBOX(1605103.603700283\, 3056801.8246613783\, 1232201.6067875316\, 247331.9412217624)" +LBY, Libya, "BBOX(1036539.304552783\, 2799870.317374274\, 3918041.4975678376\, 2213781.647695001)" +MCO, Monaco, "BBOX(822751.2243894777\, 828138.0858677052\, 5429655.8071539095\, 5423375.498489419)" +TUN, Tunisia, "BBOX(834029.8925561006\, 1289264.82751983\, 4486662.225217784\, 3533714.341264127)" +BEN, Benin, "BBOX(86457.72966594121\, 429136.6369483333\, 1390883.792858654\, 693627.7186615759)" +TCD, Chad, "BBOX(1498576.8622784517\, 2671973.3506688518\, 2686597.2252112613\, 832635.3730826946)" +GNQ, Equatorial 
Guinea, "BBOX(937773.5353889967\, 1263909.364466394\, 419234.1992921709\, 103548.81812163288)" +KIR, Kiribati, "BBOX(-1.7541914599896543E7\, 1.9252428633165136E7\, 226366.04306531145\, 148735.3163895852)" +NER, Niger, "BBOX(18552.840291496777\, 1780740.379303719\, 2695306.478633392\, 1310820.5810745189)" +NGA, Nigeria, "BBOX(299727.7289191666\, 1630792.0233506353\, 1561771.5570046515\, 476092.4293577717)" +STP, Sao Tome & Principe, "BBOX(719695.6473290791\, 830830.0137936934\, 189409.56079307984\, 2041.1542177410504)" +TGO, Togo, "BBOX(-16671.65221684311\, 200130.18052028888\, 1247820.9113916112\, 680396.3710024672)" +ALB, Albania, "BBOX(2147190.0053688344\, 2343645.64081804\, 5260414.963633992\, 4814487.957249004)" +BIH, Bosnia & Herzegovina, "BBOX(1752234.5746612719\, 2184064.14141101\, 5663486.702317411\, 5246118.059706764)" +HRV, Croatia, "BBOX(1503346.4571803163\, 2162381.1083583334\, 5866635.618622956\, 5221085.75286942)" +ITA, Italy, "BBOX(737376.1880908412\, 2061018.5894331736\, 5957525.94908941\, 4390316.944679211)" +MKD, Macedonia, "BBOX(2277465.201675234\, 2563796.186476749\, 5214901.594868669\, 4991108.7995952675)" +MLT, Malta, "BBOX(1595108.1153038554\, 1621924.980632222\, 4299511.834205549\, 4273136.461790226)" +SMR, San Marino, "BBOX(1381134.799507896\, 1392730.2829452723\, 5463410.973754562\, 5449776.352704761)" +SMN, Serbia & Montenegro, "BBOX(2054214.9647958176\, 2560904.8853427777\, 5809419.7157107135\, 5138387.144313233)" +VTC, Vatican City, "BBOX(1385312.3973578045\, 1386786.8240131561\, 5147266.721875869\, 5146144.937762506)" +BGR, Bulgaria, "BBOX(2489690.801465982\, 3184309.173149079\, 5500283.923251328\, 5048257.112102198)" +CYP, Cyprus, "BBOX(3592264.716628652\, 3850099.91554189\, 4257858.611081361\, 4115102.5028513763)" +EGY, Egypt, "BBOX(2750348.3947484\, 4107224.6734649837\, 3717055.3733837567\, 2510824.567439936)" +GEO, Georgia, "BBOX(4453109.470762285\, 5199824.4735734565\, 5401399.644378745\, 5019430.87461186)" +GRC, Greece, 
"BBOX(2186314.7988755554\, 3143444.7899599737\, 5123271.623236523\, 4154446.48763015)" +LBN, Lebanon, "BBOX(3907406.1875188733\, 4076936.6437751846\, 4116080.386414876\, 3903547.2121638493)" +SYR, Syria, "BBOX(3964583.8854840077\, 4717533.78165415\, 4479682.761680629\, 3804547.447187875)" +TUR, Turkey, "BBOX(2857106.79203054\, 4989400.245782474\, 5177469.827842194\, 4275668.354346954)" +AUT, Austria, "BBOX(1061272.4916527711\, 1910952.9027368103\, 6278042.62617315\, 5845892.142474166)" +CZE, Czech Republic, "BBOX(1346264.5256192111\, 2098619.3077916563\, 6630584.029505155\, 6204126.892396778)" +DNK, Denmark, "BBOX(900899.5106663116\, 1686397.1108695522\, 7914142.641677729\, 7277306.821832056)" +HUN, Hungary, "BBOX(1793557.3715133998\, 2548637.4774590665\, 6203250.422795402\, 5740109.762720737)" +POL, Poland, "BBOX(1574907.6352293568\, 2687639.1199670266\, 7330108.850656106\, 6275356.531185668)" +SVK, Slovakia, "BBOX(1875145.4300552672\, 2511151.0842176126\, 6377430.961535088\, 6063294.76382884)" +SVN, Slovenia, "BBOX(1489841.399198138\, 1848779.9652620046\, 5921897.448055978\, 5688808.783113411)" +SJM, Svalbard, "BBOX(1167509.6910790894\, 3744509.3710375\, 1.6048121551074298E7\, 1.2655555793739378E7)" +BEL, Belgium, "BBOX(282936.63088871894\, 712244.3658943777\, 6710441.719074484\, 6361653.309031685)" +FRA, France, "BBOX(-533251.7529219548\, 1064461.2384661005\, 6637425.700005567\, 5066318.240535327)" +DEU, Germany, "BBOX(652888.8134116667\, 1673556.9642057894\, 7372844.587967681\, 5987030.890923241)" +LIE, Liechtenstein, "BBOX(1054711.6548248013\, 1072439.8403286163\, 5987002.506696636\, 5951457.074129165)" +LUX, Luxembourg, "BBOX(638355.4972931738\, 726251.3634604733\, 6477821.694262034\, 6351301.791746342)" +NLD, Netherlands, "BBOX(375242.7526416523\, 802721.8423723045\, 7069632.465484033\, 6577873.226207013)" +CHE, Switzerland, "BBOX(664244.403346417\, 1167542.0850509058\, 6074750.670815664\, 5753058.221661312)" +USA, United States, 
"BBOX(-1.9838976150769826E7\, 2.001256564961837E7\, 1.1523520412740182E7\, 2146164.589200235)" +BLR, Belarus, "BBOX(2578760.5316635333\, 3644606.1393169463\, 7591830.885400406\, 6665963.6751351105)" +EST, Estonia, "BBOX(2430923.1272140685\, 3138551.853062327\, 8325466.382266233\, 7867699.765386352)" +FIN, Finland, "BBOX(2171998.1104861163\, 3515688.0389226186\, 1.1097617254588177E7\, 8356849.0793245975)" +LVA, Latvia, "BBOX(2334214.876198328\, 3143213.0227801744\, 7984826.971795753\, 7493955.154644284)" +LTU, Lithuania, "BBOX(2331345.838962512\, 2984815.5174770574\, 7648495.086573079\, 7149414.5404388225)" +MDA, Moldova, "BBOX(2964993.634990694\, 3353912.54367185\, 6185122.9269956285\, 5692430.167578349)" +ROM, Romania, "BBOX(2255447.2082748064\, 3303096.1980072116\, 6150868.213605207\, 5407332.237900151)" +UKR, Ukraine, "BBOX(2465887.5773919513\, 4472677.433490184\, 6868872.82154549\, 5524305.8506691335)" +IND, India, "BBOX(7585780.649085295\, 1.0840351679187058E7\, 4232806.675603967\, 752682.9865532124)" +MDV, Maldives, "BBOX(8111115.582462115\, 8197263.621304713\, 784297.2010665077\, -71431.20290758506)" +OMN, Oman, "BBOX(5788534.594925483\, 6662146.69277591\, 3044819.2631402686\, 1879282.0779841878)" +SOM, Somalia, "BBOX(4562831.081569439\, 5723081.7399744\, 1343337.2289440092\, -186472.5685638059)" +LKA, Sri Lanka, "BBOX(8871728.267099438\, 9116138.224105384\, 1099474.3430723047\, 659969.3086218301)" +TKM, Turkmenistan, "BBOX(5705144.162508433\, 7421768.6339453105\, 5280998.579824433\, 4183738.4781891424)" +UZB, Uzbekistan, "BBOX(6233612.182953193\, 8144973.85086014\, 5711801.139928842\, 4464923.610179015)" +YEM, Yemen, "BBOX(4737309.24391286\, 6063959.275257026\, 2154858.799301538\, 1362182.6880406907)" +ARM, Armenia, "BBOX(4837295.297334552\, 5189774.327307057\, 5056256.290729958\, 4698942.432854185)" +AZE, Azerbaijan, "BBOX(4984760.226767874\, 5752658.326798638\, 5268048.77475221\, 4616618.723595905)" +BHR, Bahrain, "BBOX(5616438.669684706\, 
5654628.379468894\, 3034905.550106453\, 2946160.3652355284)" +IRN, Iran, "BBOX(4901948.6557028685\, 7049893.741177648\, 4833901.247983729\, 2885079.0840316075)" +KWT, Kuwait, "BBOX(5181582.214661229\, 5389710.255315938\, 3514372.934498193\, 3317085.938189461)" +QAT, Qatar, "BBOX(5649679.671506368\, 5745847.577713873\, 3017981.013632691\, 2821312.488451719)" +SAU, Saudi Arabia, "BBOX(3848553.5764954956\, 6196722.907460272\, 3783666.794569951\, 1848481.0463722278)" +ARE, United Arab Emirates, "BBOX(5742229.694263595\, 6276378.014364274\, 3009473.8025495554\, 2587735.5585281393)" +AFG, Afghanistan, "BBOX(6735292.615095663\, 8339581.582762433\, 4646317.28372925\, 3427436.851842879)" +KGZ, Kyrgyzstan, "BBOX(7708819.076615721\, 8936904.82707441\, 5345044.727405903\, 4749710.205362992)" +NPL, Nepal, "BBOX(8911370.139640821\, 9817772.840653224\, 3558261.041954822\, 3044776.39805181)" +PAK, Pakistan, "BBOX(6775605.521527455\, 8663319.92396695\, 4447583.65883328\, 2715440.846640232)" +TJK, Tajikistan, "BBOX(7499004.100397766\, 8369832.209103333\, 5019609.3336218465\, 4393464.385496015)" +BGD, Bangladesh, "BBOX(9800998.997143846\, 1.0315904296110207E7\, 3076839.5287209633\, 2361476.7409209567)" +BTN, Bhutan, "BBOX(9879820.321061922\, 1.025410784115321E7\, 3290010.9896438504\, 3086490.161301852)" +BRN, Brunei, "BBOX(1.2701006428488798E7\, 1.2841845733150518E7\, 563234.0022074429\, 447670.0898939893)" +CHN, China, "BBOX(8195345.9204370845\, 1.5002356674063785E7\, 7086089.890077106\, 2057325.3856844143)" +JPN, Japan, "BBOX(1.3767868049134541E7\, 1.623176311896106E7\, 5698420.16133248\, 2784071.2548644035)" +PRK, North Korea, "BBOX(1.3839679250759868E7\, 1.4549170017730366E7\, 5312900.3745006835\, 4533106.558340659)" +PLW, Palau, "BBOX(1.4967181830048332E7\, 1.4990157059749957E7\, 863059.693444481\, 815429.4880146481)" +PHL, Philippines, "BBOX(1.3018814446461111E7\, 1.4092828900986778E7\, 2201037.2202695687\, 562799.2811739098)" +KOR, South Korea, "BBOX(1.4037278471337833E7\, 
1.4425544602525068E7\, 4668132.414354527\, 3920844.3714562915)" +KHM, Cambodia, "BBOX(1.139316126476325E7\, 1.1982027233402364E7\, 1655642.1223870981\, 1166706.2324655629)" +LAO, Laos, "BBOX(1.1142120562289124E7\, 1.1988580834463434E7\, 2571654.2509495416\, 1565804.2404149454)" +MYS, Malaysia, "BBOX(1.1092089575631922E7\, 1.32777233218629E7\, 820779.1279511156\, 94934.7631846226)" +MMR, Myanmar, "BBOX(1.0264212645289583E7\, 1.126212909591956E7\, 3318054.720285839\, 1100761.292465509)" +SGP, Singapore, "BBOX(1.1537257221127674E7\, 1.157699827933534E7\, 160905.1210847127\, 140165.52511697204)" +THA, Thailand, "BBOX(1.0836648747645825E7\, 1.1759712080245482E7\, 2326960.8760532974\, 628128.2178646458)" +VNM, Vietnam, "BBOX(1.137025572106285E7\, 1.2185570803468373E7\, 2671268.1479721097\, 956373.5794062541)" +GUM, Guam, "BBOX(1.610060037235469E7\, 1.613612854443387E7\, 1534354.7088998647\, 1486593.2644101644)" +MHL, Marshall Is., "BBOX(1.8069932221681617E7\, 1.9077718703641918E7\, 1642457.1731015244\, 624414.5801310536)" +FSM, Micronesia, "BBOX(1.76018490137313E7\, 1.8149851601056725E7\, 778674.0289479959\, 586550.7704269526)" +MNP, Northern Mariana Is., "BBOX(1.6205076831395375E7\, 1.6232394634432243E7\, 1720127.7032804906\, 1678605.9653024632)" +UMI, Wake I., "BBOX(1.854682692392445E7\, 1.8552751235904157E7\, 2193187.709933591\, 2187863.8226788775)" +BWA, Botswana, "BBOX(2225956.6714169392\, 3269856.198060967\, -2012057.3125287183\, -3107932.575048184)" +BDI, Burundi, "BBOX(3226595.4401938887\, 3434561.510989516\, -256277.86419111618\, -495653.34463959694)" +ATF, French Southern & Antarctic Lands, "BBOX(5749744.761766512\, 7855537.163585416\, -5833010.924598094\, -6398787.743617378)" +HMD, Heard I. 
& McDonald Is., "BBOX(8152450.513138738\, 8212470.976939865\, -6976553.288377103\, -7019975.393962887)" +KEN, Kenya, "BBOX(3774534.2414511004\, 4664861.406119774\, 515133.4762737857\, -520395.9201280237)" +RWA, Rwanda, "BBOX(3212062.1240753955\, 3439022.3056239635\, -117387.0182772328\, -314659.7811132031)" +TZA, Tanzania, "BBOX(3266205.9206388732\, 4501404.98655826\, -111015.40498408281\, -1316180.4208213643)" +ZMB, Zambia, "BBOX(2448627.045068894\, 3751720.8702890654\, -915014.476700008\, -2046319.4302683398)" +ZWE, Zimbabwe, "BBOX(2809472.180051312\, 3681512.6693309383\, -1760356.671722378\, -2561396.0054164226)" +ATA, Antarctica, "BBOX(-2.003750834E7\, 2.003750834E7\, -8512662.881033322\, -4.748140766343476E9)" +NOR, Bouvet I., "BBOX(372070.1471544857\, 387855.25094677455\, -7243144.612387524\, -7258293.454237509)" +COM, Comoros, "BBOX(4810563.480097139\, 4957103.455881589\, -1273745.795821429\, -1389333.8616461232)" +REU, Juan De Nova I., "BBOX(4755993.663842456\, 4760121.613199477\, -1926881.0822095312\, -1929687.4249448022)" +LSO, Lesotho, "BBOX(3007181.718244638\, 3278977.271857335\, -3321117.2692412077\, -3587446.106149188)" +MWI, Malawi, "BBOX(3638129.460024005\, 3998703.3094073967\, -1048497.2089351554\, -1936578.3607502843)" +MOZ, Mozambique, "BBOX(3363297.7786198338\, 4546968.054133605\, -1172181.8581435068\, -3106026.6491282047)" +ZAF, South Africa, "BBOX(1834915.5679635953\, 4218142.412200545\, -2527908.4975596936\, -5937134.146607068)" +SWZ, Swaziland, "BBOX(3428455.080322901\, 3577073.7249586442\, -2965472.9128583763\, -3163056.5390926218)" +AGO, Angola, "BBOX(1305916.2195893514\, 2681068.153065396\, -489058.770192694\, -2039467.1713562359)" +COG, Congo, "BBOX(1240172.93208683\, 2075397.0601399948\, 413407.92638141196\, -558981.4471095677)" +ZAR, Congo\, DRC, "BBOX(1359717.9313576685\, 3484608.750292371\, 599858.1461695591\, -1512112.8916449302)" +FJI, Fiji, "BBOX(-2.003750834E7\, 2.003750834E7\, -1822502.649701532\, -2174110.2636207)" +GAB, Gabon, 
"BBOX(968572.632860957\, 1616312.474546188\, 258097.85802697268\, -437302.607003333)" +NAM, Namibia, "BBOX(1304262.6798733384\, 2812423.14843234\, -1915491.159689654\, -3370794.2160844747)" +NZL, New Zealand, "BBOX(-1.9686713351283982E7\, 1.9908496063316472E7\, -4084625.39078185\, -6905327.726548656)" +IOT, British Indian Ocean Territory, "BBOX(8054844.581749367\, 8070026.5565406205\, -807374.1159864698\, -830132.9519243974)" +REU, Glorioso Is., "BBOX(5263084.334556216\, 5265751.883513724\, -1295003.534066991\, -1297694.4422191991)" +MDG, Madagascar, "BBOX(4813101.564486872\, 5621789.129689449\, -1339512.841638736\, -2948183.285092941)" +MUS, Mauritius, "BBOX(6379309.136233983\, 7068315.001831045\, -2234372.9783939887\, -2334800.8501905375)" +MYT, Mayotte, "BBOX(5013736.69021733\, 5042032.101022207\, -1421199.6616333937\, -1458875.4272419864)" +REU, Reunion, "BBOX(6147123.9517467795\, 6217533.529663724\, -2374779.1643490326\, -2436517.3438334884)" +SYC, Seychelles, "BBOX(5143593.993155349\, 6182746.188795668\, -507222.7096158059\, -1058244.6497234497)" +CXR, Christmas I., "BBOX(1.175856649136589E7\, 1.1772247656782478E7\, -1162331.3692172004\, -1176694.9418773586)" +CCK, Cocos Is., "BBOX(1.0777673796502084E7\, 1.0782945219668373E7\, -1360554.4203425802\, -1368415.0936628287)" +IDN, Indonesia, "BBOX(1.0598833913871005E7\, 1.5696829439852942E7\, 659456.6237303711\, -1224130.4157647756)" +TLS, Timor Leste, "BBOX(1.3808748684969299E7\, 1.4171927521756383E7\, -909204.3581778448\, -1058309.2029773812)" +AUS, Australia, "BBOX(1.2568773011020126E7\, 1.7695387664886124E7\, -1134231.265244234\, -7314247.137263005)" +NRU, Nauru, "BBOX(1.8579714820321366E7\, 1.8585573231162526E7\, -54918.590898148344\, -61473.57829423625)" +NCL, New Caledonia, "BBOX(1.8254475669742182E7\, 1.871620264608858E7\, -2283448.9342597914\, -2592628.621050228)" +NFK, Norfolk I., "BBOX(1.869176089341545E7\, 1.870154888228107E7\, -3375716.673702962\, -3385973.448600687)" +PNG, Papua New Guinea, 
"BBOX(1.5680335898821346E7\, 1.7362149763616595E7\, -150883.37308403326\, -1305049.300451269)" +SLB, Solomon Is., "BBOX(1.7329249844714675E7\, 1.858276697811951E7\, -736957.2370687899\, -1328168.5471204517)" +TUV, Tuvalu, "BBOX(1.962509790181899E7\, 1.9952046251859576E7\, -679153.8120624761\, -956604.9181074377)" +VUT, Vanuatu, "BBOX(1.8537103723002467E7\, 1.8912498315429542E7\, -1540647.6688226506\, -2303165.641357482)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8dd9704fd2d4b..d5aca3d7cea4c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -315,6 +315,17 @@ from employees | where birth_date > now() | sort emp_no asc | keep emp_no, birth emp_no:integer | birth_date:date ; +autoBucketYearInAgg#[skip:-8.12.99, reason:date type is supported in 8.13] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket = AUTO_BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) +| STATS COUNT(*) by bucket +| sort bucket; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + autoBucketMonthInAgg // tag::auto_bucket_in_agg[] @@ -398,6 +409,19 @@ dd_oo:integer null ; +docsDateDiff#[skip:-8.12.99, reason:date_diff added in 8.13] +// tag::docsDateDiff[] +ROW date1 = TO_DATETIME("2023-12-02T11:00:00.000Z"), date2 = TO_DATETIME("2023-12-02T11:00:00.001Z") +| EVAL dd_ms = DATE_DIFF("microseconds", date1, date2) +// end::docsDateDiff[] +; + +// tag::docsDateDiff-result[] +date1:date | date2:date | dd_ms:integer +2023-12-02T11:00:00.000Z | 2023-12-02T11:00:00.001Z | 1000 +// end::docsDateDiff-result[] +; + evalDateParseWithSimpleDate row a = "2023-02-01" | eval b = date_parse("yyyy-MM-dd", a) | keep b; @@ -906,11 +930,11 @@ FROM employees //end::docsAutoBucketWeeklyHistogram-result[] ; -docsAutoBucketLast24hr 
+docsAutoBucketLast24hr#[skip:-8.12.99, reason:date type is supported in 8.13] //tag::docsAutoBucketLast24hr[] FROM sample_data | WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW() -| EVAL bucket = AUTO_BUCKET(@timestamp, 25, DATE_FORMAT(NOW() - 1 day), DATE_FORMAT(NOW())) +| EVAL bucket = AUTO_BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) | STATS COUNT(*) BY bucket //end::docsAutoBucketLast24hr[] ; @@ -918,11 +942,11 @@ FROM sample_data COUNT(*):long | bucket:date ; -docsGettingStartedAutoBucket +docsGettingStartedAutoBucket#[skip:-8.12.99, reason:date type is supported in 8.13] // tag::gs-auto_bucket[] FROM sample_data | KEEP @timestamp -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) // end::gs-auto_bucket[] | LIMIT 0 ; @@ -934,7 +958,7 @@ docsGettingStartedAutoBucketStatsBy // tag::gs-auto_bucket-stats-by[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS COUNT(*) BY bucket // end::gs-auto_bucket-stats-by[] | SORT bucket @@ -949,7 +973,7 @@ docsGettingStartedAutoBucketStatsByMedian // tag::gs-auto_bucket-stats-by-median[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS median_duration = MEDIAN(event_duration) BY bucket // end::gs-auto_bucket-stats-by-median[] | SORT bucket @@ -1048,3 +1072,29 @@ hires:long | year:date 1 |1999-01-01T00:00:00.000Z // end::docsDateTruncHistogram-result[] ; + +docsNow +// tag::docsNow[] +ROW current_date = NOW() +// end::docsNow[] +| EVAL y = SUBSTRING(DATE_FORMAT("yyyy", current_date), 0, 2) +| KEEP y +; + +// 
tag::docsNow-result[] +y:keyword +20 +// end::docsNow-result[] +; + +docsNowWhere +// tag::docsNowWhere[] +FROM sample_data +| WHERE @timestamp > NOW() - 1 hour +// end::docsNowWhere[] +; + +// tag::docsNowWhere-result[] +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +// end::docsNowWhere-result[] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index e4f1726b3e1ff..441f6d8a264fe 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -262,6 +262,24 @@ FROM sample_data @timestamp:date | client_ip:ip | event_duration:long | message:keyword | duration_ms:double ; +docsLength +// tag::length[] +FROM employees +| KEEP first_name, last_name +| EVAL fn_length = LENGTH(first_name) +// end::length[] +| SORT first_name +| LIMIT 3 +; + +// tag::length-result[] +first_name:keyword | last_name:keyword | fn_length:integer +Alejandro |McAlpine |9 +Amabile |Gomatam |7 +Anneke |Preusig |6 +// end::length-result[] +; + docsGettingStartedEvalNoColumnName // tag::gs-eval-no-column-name[] FROM sample_data @@ -319,7 +337,7 @@ Parto |Bamford |6.004230000000001 // end::evalReplace-result[] ; -docsEvalUnnamedColumn +docsEvalUnnamedColumn#[skip:-8.12.99,reason:expression spaces are maintained since 8.13] // tag::evalUnnamedColumn[] FROM employees | SORT emp_no @@ -329,7 +347,7 @@ FROM employees | LIMIT 3; // tag::evalUnnamedColumn-result[] -first_name:keyword | last_name:keyword | height:double | height*3.281:double +first_name:keyword | last_name:keyword | height:double | height * 3.281:double Georgi |Facello |2.03 |6.66043 Bezalel |Simmel |2.08 |6.82448 Parto |Bamford |1.83 |6.004230000000001 @@ -348,4 +366,4 @@ FROM employees avg_height_feet:double 5.801464200000001 // end::evalUnnamedColumnStats-result[] -; \ No newline at end of file +; diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ignore-case.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ignore-case.csv-spec new file mode 100644 index 0000000000000..7d90171d1d305 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ignore-case.csv-spec @@ -0,0 +1,135 @@ + +simpleFilter#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ "mary" | keep emp_no, first_name, last_name; + +emp_no:integer | first_name:keyword | last_name:keyword +10011 | Mary | Sluis +; + + +simpleFilterUpper#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ "MARY" | keep emp_no, first_name, last_name; + +emp_no:integer | first_name:keyword | last_name:keyword +10011 | Mary | Sluis +; + + +simpleFilterPartial#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ "mar" | keep emp_no, first_name, last_name; + +emp_no:integer | first_name:keyword | last_name:keyword +; + + +mixedConditionsAnd#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ "mary" AND emp_no == 10011 | keep emp_no, first_name, last_name; + +emp_no:integer | first_name:keyword | last_name:keyword +10011 | Mary | Sluis +; + + +mixedConditionsOr#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ "mary" OR emp_no == 10001 | keep emp_no, first_name, last_name |sort emp_no; + +emp_no:integer | first_name:keyword | last_name:keyword +10001 | Georgi | Facello +10011 | Mary | Sluis +; + + +evalEquals#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where emp_no == 10001 +| eval a = first_name =~ "georgi", b = first_name == "georgi", c = first_name =~ "GEORGI", d = first_name =~ "Geor", e = first_name =~ "GeoRgI" +| keep emp_no, first_name, a, b, c, 
d, e; + +emp_no:integer | first_name:keyword | a:boolean | b:boolean | c:boolean | d:boolean | e:boolean +10001 | Georgi | true | false | true | false | true +; + + +//waiting for final decisions on supporting generic expressions on the right +//https://github.com/elastic/elasticsearch/issues/103599 +constantsAndFolding-Ignore +row name = "foobar" | where "FoObAr" =~ name; + +name:keyword +foobar +; + + +noWildcardSimple#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +row name = "foobar" | where name =~ "FoOb*"; + +name:keyword +; + + +noWildcard#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ "Georg*" | sort emp_no | keep emp_no, first_name; + +emp_no:integer | first_name:keyword +; + + +noWildcardSingle#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ "Georg?" | sort emp_no | keep emp_no, first_name; + +emp_no:integer | first_name:keyword +; + + +//waiting for final decisions on supporting generic expressions on the right +//https://github.com/elastic/elasticsearch/issues/103599 +fieldRight-Ignore +from employees | where "Guoxiang" =~ first_name | keep emp_no, first_name; + +emp_no:integer | first_name:keyword +10015 | Guoxiang +; + + +expressionsRight#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where first_name =~ concat("Tzv","ETAN") | keep emp_no, first_name; + +emp_no:integer | first_name:keyword +10007 | Tzvetan +; + + +expressionsLeft#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where concat(first_name, "_foo") =~ "TzvETAN_fOo" | keep emp_no, first_name; + +emp_no:integer | first_name:keyword +10007 | Tzvetan +; + + +//waiting for final decisions on supporting generic expressions on the right +//https://github.com/elastic/elasticsearch/issues/103599 +expressionsLeftRight-Ignore +from employees | where 
substring(first_name, 1, 2) =~ substring(last_name, -2) | keep emp_no, first_name, last_name | sort emp_no; + +emp_no:integer | first_name:keyword | last_name:keyword +10055 | Georgy | Dredge +10091 | Amabile | Gomatam +; + + +multiValuesExcluded#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +row a = ["Foo", "Bar"] | where a =~ "foo"; + +a:keyword +; + + +multiValuesPushedDownExcluded#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +from employees | where job_positions =~ "reporting analyst" | sort emp_no | keep emp_no, job_positions; +warning:Line 1:24: evaluation of [job_positions =~ \"reporting analyst\"] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value + +emp_no:integer | job_positions:keyword +10013 | Reporting Analyst +10026 | Reporting Analyst +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json index 78f85112bd516..ce7dc45caf44a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json @@ -14,6 +14,15 @@ }, "location": { "type": "geo_point" + }, + "country": { + "type": "keyword" + }, + "city": { + "type": "keyword" + }, + "city_location": { + "type": "geo_point" } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox.json new file mode 100644 index 0000000000000..eb386b84ff70b --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox.json @@ -0,0 +1,13 @@ +{ + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "shape": { + "type": "geo_shape" + } + } +} diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox_web.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox_web.json new file mode 100644 index 0000000000000..303c828c84285 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox_web.json @@ -0,0 +1,13 @@ +{ + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "shape": { + "type": "shape" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/null.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/null.csv-spec index 0d7fed9028fe4..92537ed1221e8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/null.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/null.csv-spec @@ -56,7 +56,7 @@ COUNT(emp_no):long // end::is-not-null-result[] ; -coalesceSimple +coalesceSimple#[skip:-8.12.99,reason:expression spaces are maintained since 8.13] // tag::coalesce[] ROW a=null, b="b" | EVAL COALESCE(a, b) @@ -64,7 +64,7 @@ ROW a=null, b="b" ; // tag::coalesce-result[] -a:null | b:keyword | COALESCE(a,b):keyword +a:null | b:keyword | COALESCE(a, b):keyword null | b | b // end::coalesce-result[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index a7dc82263a86e..e1c1b276a90eb 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -9,91 +9,96 @@ v:long showFunctions#[skip:-8.12.99] show functions; - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean -abs |"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" 
|"integer|long|double|unsigned_long" | "" | false | false -acos |"double acos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false -asin |"double asin(n:integer|long|double|unsigned_long)"|n |"integer|long|double|unsigned_long" | "" |double | "" | false | false -atan |"double atan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false -atan2 |"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" |[y, x] |["integer|long|double|unsigned_long", "integer|long|double|unsigned_long"] |["", ""] |double | "" | [false, false] | false -auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date", "integer|long|double|date"] |["", "", "", ""] | "double|date" | "" | [false, false, false, false] | false -avg |? avg(arg1:?) |arg1 |? | "" |? | "" | false | false -case |? case(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -ceil |"? ceil(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -cidr_match |? cidr_match(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -coalesce |? coalesce(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -concat |? concat(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? 
| "" | [false, false] | true -cos |"double cos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false -cosh |"double cosh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false -count |? count(arg1:?) |arg1 |? | "" |? | "" | false | false -count_distinct |? count_distinct(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false -date_extract |? date_extract(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -date_format |? date_format(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|[A valid date pattern, A string representing a date]|date |Parses a string into a date value | [true, false] | false -date_trunc |? date_trunc(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -e |? e() | null | null | null |? | "" | null | false -ends_with |? ends_with(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -floor |"? floor(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -greatest |"? 
greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |? | "" | [false, false] | true -least |"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |? | "" | [false, false] | true -left |"? left(string:keyword, length:integer)" |[string, length] |["keyword", "integer"] |["", ""] |? | "" | [false, false] | false -length |? length(arg1:?) |arg1 |? | "" |? | "" | false | false -log10 |"? log10(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false -max |? max(arg1:?) |arg1 |? | "" |? | "" | false | false -median |? median(arg1:?) |arg1 |? | "" |? | "" | false | false -median_absolute_deviation|? median_absolute_deviation(arg1:?) |arg1 |? | "" |? | "" | false | false -min |? min(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_avg |? mv_avg(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." 
| [false, false] | false -mv_count |"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false -mv_dedupe |"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" |v | "boolean|date|double|ip|text|integer|keyword|version|long" | "" |? | "Remove duplicate values from a multivalued field." | false | false -mv_first |"? mv_first(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "Reduce a multivalued field to a single valued field containing the first value." | false | false -mv_last |"? mv_last(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "Reduce a multivalued field to a single valued field containing the last value." | false | false -mv_max |"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false -mv_median |? mv_median(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_min |"? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false -mv_sum |? mv_sum(arg1:?) |arg1 |? | "" |? | "" | false | false -now |? 
now() | null |null | null |? | "" | null | false -percentile |? percentile(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -pi |? pi() | null | null | null |? | "" | null | false -pow |"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)" |[base, exponent] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |["", ""] |? | "" | [false, false] | false -replace |"? replace(arg1:?, arg2:?, arg3:?)" | [arg1, arg2, arg3] | [?, ?, ?] |["", "", ""] |? | "" | [false, false, false]| false -right |"? right(string:keyword, length:integer)" |[string, length] |["keyword", "integer"] |["", ""] |? | "" | [false, false] | false -round |? round(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false -sin |"double sin(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" |An angle, in radians |double |Returns the trigonometric sine of an angle | false | false -sinh |"double sinh(n:integer|long|double|unsigned_long)"|n |"integer|long|double|unsigned_long" | "The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false -split |? split(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -sqrt |"? sqrt(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -starts_with |? starts_with(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -substring |? substring(arg1:?, arg2:?, arg3:?) |[arg1, arg2, arg3] |[?, ?, ?] |["", "", ""] |? | "" | [false, false, false]| false -sum |? sum(arg1:?) |arg1 |? | "" |? 
| "" | false | false -tan |"double tan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false -tanh |"double tanh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false -tau |? tau() | null | null | null |? | "" | null | false -to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false -to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false -to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | |false |false -to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false -to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false -to_degrees |"double to_degrees(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false -to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false -to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false -to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | |false |false -to_int |"integer 
to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false -to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false -to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | |false |false -to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point" | |long | |false |false -to_radians |"double to_radians(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false -to_str |"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false -to_string |"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false -to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false -to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false -to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false -to_ver |"version to_ver(v:keyword|text|version)" |v 
|"keyword|text|version" | |version | |false |false -to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | |false |false -trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading and trailing whitespaces from a string.| false | false + name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +abs |"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false +acos |"double acos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "The arccosine of an angle, expressed in radians." | false | false | false +asin |"double asin(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" | "" |double | "Inverse sine trigonometric function." | false | false | false +atan |"double atan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Inverse tangent trigonometric function." | false | false | false +atan2 |"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" |[y, x] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." 
| [false, false] | false | false +auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false +avg |"double avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "The average of a numeric field." | false | false | true +case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false +ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false +cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." 
| [false, false] | true | false +coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null." | [false, false] | true | false +concat |"keyword concat(first:keyword|text, rest...:keyword|text)" |[first, rest] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false +cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false +cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false +count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true +count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." 
| [false, true] | false | true +date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false +date_extract |long date_extract(date_part:keyword, field:date) |[date_part, field] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |keyword date_format(?format:keyword, date:date) |[format, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false +date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false +date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." 
| [false, false] | false | false +e |double e() | null | null | null |double | "Euler’s number." | null | false | false +ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false +floor |"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false +greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | [false, false] | true | false +least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | [false, false] | true | false +left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the left." | [false, false] | false | false +length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." 
| false | false | false +log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false +ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false +max |"double|integer|long|unsigned_long max(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The maximum value of a numeric field." | false | false | true +median |"double|integer|long|unsigned_long median(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The value that is greater than half of all values and less than half of all values." | false | false | true +median_absolute_deviation|"double|integer|long|unsigned_long median_absolute_deviation(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The median absolute deviation, a measure of variability." | false | false | true +min |"double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The minimum value of a numeric field." | false | false | true +mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false +mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." 
| [false, false] | false | false +mv_count |"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false +mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." | false | false | false +mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." 
| false | false | false +mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false +mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false +mv_median |"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false +mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." 
| false | false | false +mv_sum |"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false +now |date now() | null |null | null |date | "Returns current date and time." | null | false | false +percentile |"double|integer|long|unsigned_long percentile(field:double|integer|long|unsigned_long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long|unsigned_long, double|integer|long"] |["", ""] |"double|integer|long|unsigned_long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true +pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false +pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false +replace |"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" | [str, regex, newStr] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false +right |"keyword right(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false +round |"double round(value:double, ?decimals:integer)" |[value, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. 
Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." | [false, true] | false | false +rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false +sin |"double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false +sinh |"double sinh(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false +split |"keyword split(str:keyword|text, delim:keyword|text)" |[str, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false +sqrt |"double sqrt(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false +st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." 
| false | false | true +starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false +substring |"keyword substring(str:keyword|text, start:integer, ?length:integer)" |[str, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false +sum |"long sum(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |long | "The sum of a numeric field." | false | false | true +tan |"double tan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false +tanh |"double tanh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false +tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false +to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." 
|false |false | false +to_cartesianshape |"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" |v |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false +to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_degrees |"double to_degrees(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false +to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false +to_geoshape |"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" |v |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false +to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." 
|false |false | false +to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false +to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." |false |false | false +to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false +to_radians |"double to_radians(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." |false |false | false +to_str |"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_string |"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." 
|false |false | false +to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_upper |"keyword|text to_upper(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to upper case." |false |false | false +to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." | false | false | false ; @@ -101,87 +106,92 @@ showFunctionsSynopsis#[skip:-8.12.99] show functions | keep synopsis; synopsis:keyword -"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" -"double acos(n:integer|long|double|unsigned_long)" -"double asin(n:integer|long|double|unsigned_long)" -"double atan(n:integer|long|double|unsigned_long)" -"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" -? avg(arg1:?) -? case(arg1:?, arg2...:?) -"? ceil(n:integer|long|double|unsigned_long)" -? cidr_match(arg1:?, arg2...:?) -? coalesce(arg1:?, arg2...:?) -? 
concat(arg1:?, arg2...:?) -"double cos(n:integer|long|double|unsigned_long)" -"double cosh(n:integer|long|double|unsigned_long)" -? count(arg1:?) -? count_distinct(arg1:?, arg2:?) +"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" +"double acos(n:double|integer|long|unsigned_long)" +"double asin(n:double|integer|long|unsigned_long)" +"double atan(n:double|integer|long|unsigned_long)" +"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" +"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" +"double avg(field:double|integer|long|unsigned_long)" +"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" +boolean cidr_match(ip:ip, blockX...:keyword) +"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" +"keyword concat(first:keyword|text, rest...:keyword|text)" +"double cos(n:double|integer|long|unsigned_long)" +"double cosh(n:double|integer|long|unsigned_long)" +"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -? date_extract(arg1:?, arg2:?) -? date_format(arg1:?, arg2:?) +long date_extract(date_part:keyword, field:date) +keyword date_format(?format:keyword, date:date) "date date_parse(?datePattern:keyword, dateString:keyword|text)" -? date_trunc(arg1:?, arg2:?) -? e() -? 
ends_with(arg1:?, arg2:?) -"? floor(n:integer|long|double|unsigned_long)" -"? greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -? left(string:keyword, length:integer) -? length(arg1:?) -"? log10(n:integer|long|double|unsigned_long)" +"date date_trunc(interval:keyword, date:date)" +double e() +"boolean ends_with(str:keyword|text, suffix:keyword|text)" +"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" +"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +"keyword left(str:keyword|text, length:integer)" +"integer length(str:keyword|text)" +"double log10(n:double|integer|long|unsigned_long)" "keyword|text ltrim(str:keyword|text)" -? max(arg1:?) -? median(arg1:?) -? median_absolute_deviation(arg1:?) -? min(arg1:?) -? mv_avg(arg1:?) +"double|integer|long|unsigned_long max(field:double|integer|long|unsigned_long)" +"double|integer|long|unsigned_long median(field:double|integer|long|unsigned_long)" +"double|integer|long|unsigned_long median_absolute_deviation(field:double|integer|long|unsigned_long)" +"double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)" +"double mv_avg(field:double|integer|long|unsigned_long)" "keyword mv_concat(v:text|keyword, delim:text|keyword)" -"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" -"? 
mv_first(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -"? mv_last(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" -? mv_median(arg1:?) -"? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" -? mv_sum(arg1:?) -? now() -? percentile(arg1:?, arg2:?) -? pi() -"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)" -? replace(arg1:?, arg2:?, arg3:?) -? right(string:keyword, length:integer) -? round(arg1:?, arg2:?) +"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" +date now() +"double|integer|long|unsigned_long 
percentile(field:double|integer|long|unsigned_long, percentile:double|integer|long)" +double pi() +"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" +"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" +"keyword right(str:keyword|text, length:integer)" +"double round(value:double, ?decimals:integer)" "keyword|text rtrim(str:keyword|text)" -"double sin(n:integer|long|double|unsigned_long)" -"double sinh(n:integer|long|double|unsigned_long)" -? split(arg1:?, arg2:?) -"? sqrt(n:integer|long|double|unsigned_long)" -? starts_with(arg1:?, arg2:?) -? substring(arg1:?, arg2:?, arg3:?) -? sum(arg1:?) -"double tan(n:integer|long|double|unsigned_long)" -"double tanh(n:integer|long|double|unsigned_long)" -? tau() +"double sin(n:double|integer|long|unsigned_long)" +"double sinh(n:double|integer|long|unsigned_long)" +"keyword split(str:keyword|text, delim:keyword|text)" +"double sqrt(n:double|integer|long|unsigned_long)" +"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"boolean starts_with(str:keyword|text, prefix:keyword|text)" +"keyword substring(str:keyword|text, start:integer, ?length:integer)" +"long sum(field:double|integer|long|unsigned_long)" +"double tan(n:double|integer|long|unsigned_long)" +"double tanh(n:double|integer|long|unsigned_long)" +double tau() "boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" "boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" +"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" +"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" "date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" "double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"double to_degrees(v:double|long|unsigned_long|integer)" +"double 
to_degrees(v:double|integer|long|unsigned_long)" "double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" -"geo_point to_geopoint(v:geo_point|keyword|text)" +"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" +"geo_point to_geopoint(v:geo_point|keyword|text)" +"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" "integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "ip to_ip(v:ip|keyword|text)" -"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" -"double to_radians(v:double|long|unsigned_long|integer)" -"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" +"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"keyword|text to_lower(str:keyword|text)" +"double to_radians(v:double|integer|long|unsigned_long)" +"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" "unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"keyword|text to_upper(str:keyword|text)" "version to_ver(v:keyword|text|version)" "version to_version(v:keyword|text|version)" "keyword|text trim(str:keyword|text)" @@ -196,9 +206,9 @@ SHOW functions ; // 
tag::showFunctionsFiltered-result[] - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean -sin | "double sin(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false -sinh | "double sinh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false + name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +sin | "double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false +sinh | "double sinh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false // end::showFunctionsFiltered-result[] ; @@ -208,5 +218,5 @@ countFunctions#[skip:-8.12.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -84 | 84 | 84 +89 | 89 | 89 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 1719ef6298e71..5c4aae740910b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -12,8 +12,8 @@ POINT(42.97109629958868 14.7552534006536) |POINT(42.97109629958868 14.7552534006 convertFromString#[skip:-8.12.99, reason:spatial type 
geo_point improved precision in 8.13] // tag::to_geopoint-str[] -row wkt = "POINT(42.97109630194 14.7552534413725)" -| eval pt = to_geopoint(wkt) +ROW wkt = "POINT(42.97109630194 14.7552534413725)" +| EVAL pt = TO_GEOPOINT(wkt) // end::to_geopoint-str[] ; @@ -31,22 +31,255 @@ wkt:keyword ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] |[POINT(42.97109630194 14.7552534413725), POINT(75.8092915005895 22.727749187571)] ; +centroidFromStringNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +row wkt = "POINT(42.97109629958868 14.7552534006536)" +| STATS c = ST_CENTROID(TO_GEOPOINT(wkt)); + +c:geo_point +POINT(42.97109629958868 14.7552534006536) +; + +centroidFromString1#[skip:-8.12.99, reason:st_centroid added in 8.13] +ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| STATS c = ST_CENTROID(pt); + +c:geo_point +POINT(42.97109629958868 14.7552534006536) +; + +centroidFromString2#[skip:-8.12.99, reason:st_centroid added in 8.13] +ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| STATS c = ST_CENTROID(pt); + +c:geo_point +POINT(59.390193899162114 18.741501288022846) +; + +centroidFromString3#[skip:-8.12.99, reason:st_centroid added in 8.13] +ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| STATS c = ST_CENTROID(pt); + +c:geo_point +POINT(39.58327988510707 20.619513023697994) +; + simpleLoad#[skip:-8.12.99, reason:spatial type geo_point improved precision in 8.13] FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; -abbrev:keyword | location:geo_point | name:text | scalerank:i | type:k -CJJ | POINT(127.495916124681 36.7220227766673) | Cheongju Int'l | 9 | major -HOD | POINT(42.97109630194 
14.7552534413725) | Hodeidah Int'l | 9 | mid -IDR | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | 9 | mid -IXC | POINT(76.8017261105242 30.6707248949667) | Chandigarh Int'l | 9 | [major, military] -LYP | POINT(72.9878190922305 31.3627435480862) | Faisalabad Int'l | 9 | [mid, military] -MLG | POINT(112.711418617258 -7.92998002840567) | Abdul Rachman Saleh | 9 | [mid, military] -OMS | POINT(73.3163595376585 54.9576482934059) | Omsk Tsentralny | 9 | mid -OVB | POINT(82.6671524525865 55.0095847136264) | Novosibirsk Tolmachev | 9 | mid -OZH | POINT(35.3018728575279 47.8732635579023) | Zaporozhye Int'l | 9 | [mid, military] -TRZ | POINT(78.7089578747476 10.7603571306554) | Tiruchirappalli | 9 | mid -WIIT | POINT(105.176060419161 -5.242566777132) | Radin Inten II | 9 | mid -ZAH | POINT(60.900708564915 29.4752941956573) | Zahedan Int'l | 9 | mid +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +CJJ | Cheongju | POINT(127.4833 36.6333) | South Korea | POINT(127.495916124681 36.7220227766673) | Cheongju Int'l | 9 | major +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | 9 | mid +IXC | Chandīgarh | POINT(76.78 30.75) | India | POINT(76.8017261105242 30.6707248949667) | Chandigarh Int'l | 9 | [major, military] +LYP | Faisalabad | POINT(73.0911 31.4167) | Pakistan | POINT(72.9878190922305 31.3627435480862) | Faisalabad Int'l | 9 | [mid, military] +MLG | Malang | POINT(112.62 -7.98) | Indonesia | POINT(112.711418617258 -7.92998002840567) | Abdul Rachman Saleh | 9 | [mid, military] +OMS | Omsk | POINT(73.3833 54.9667) | Russia | POINT(73.3163595376585 54.9576482934059) | Omsk Tsentralny | 9 | mid +OVB | Novosibirsk | POINT(82.9167 55.0333) | Russia | POINT(82.6671524525865 
55.0095847136264) | Novosibirsk Tolmachev | 9 | mid +OZH | Zaporizhzhia | POINT(35.1175 47.85) | Ukraine | POINT(35.3018728575279 47.8732635579023) | Zaporozhye Int'l | 9 | [mid, military] +TRZ | Trichinopoly | POINT(78.7047 10.7903) | India | POINT(78.7089578747476 10.7603571306554) | Tiruchirappalli | 9 | mid +WIIT | Bandar Lampung | POINT(105.2667 -5.45) | Indonesia | POINT(105.176060419161 -5.242566777132) | Radin Inten II | 9 | mid +ZAH | Zāhedān | POINT(60.8628 29.4964) | Iran | POINT(60.900708564915 29.4752941956573) | Zahedan Int'l | 9 | mid +; + +centroidFromAirports#[skip:-8.12.99, reason:st_centroid added in 8.13] +// tag::st_centroid-airports[] +FROM airports +| STATS centroid=ST_CENTROID(location) +// end::st_centroid-airports[] +; + +// tag::st_centroid-airports-result[] +centroid:geo_point +POINT(-0.030548143003023033 24.37553649504829) +// end::st_centroid-airports-result[] +; + +centroidFromAirportsNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS centroid=ST_CENTROID(TO_GEOPOINT(location)) +; + +centroid:geo_point +POINT (-0.03054810272375508 24.37553651570554) +; + +centroidFromAirportsCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT(-0.030548143003023033 24.37553649504829) | 891 +; + +centroidFromAirportsCountGrouped#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| SORT scalerank DESC +; + +centroid:geo_point | count:long | scalerank:i +POINT(83.27726172452623 28.99289782286029) | 33 | 9 +POINT(-12.330427954750142 29.554613442537242) | 247 | 8 +POINT(19.934784222002094 13.864835376774234) | 133 | 7 +POINT(-10.861430599274028 28.170889387807705) | 151 | 6 +POINT(9.394940837974781 28.953888530174837) | 46 | 5 +POINT(-3.118828632340757 17.868389564340685) | 194 | 4 +POINT(-26.976065734634176 42.907839377294295) | 
24 | 3 +POINT(1.2588642098541771 24.379140841774642) | 63 | 2 +; + +centroidFromAirportsFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| WHERE scalerank == 9 +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT(83.27726172452623 28.99289782286029) | 33 +; + +centroidFromAirportsCountGroupedCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +; + +centroid:geo_point | count:long +POINT (7.572387259169772 26.836561792945492) | 891 +; + +centroidFromAirportsCountCityLocations#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS centroid=ST_CENTROID(city_location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (1.3965610809060276 24.127649406297987) | 891 +; + +centroidFromAirportsCountGroupedCountry#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country +| SORT count DESC, country ASC +| WHERE count >= 10 +; + +centroid:geo_point | count:long | country:k +POINT (-97.3333946136801 38.07953176370194) | 129 | United States +POINT (78.42264595516026 21.91585598140955) | 50 | India +POINT (-102.37784670852125 24.268197756260633) | 45 | Mexico +POINT (112.92023897966052 32.982985347554816) | 41 | China +POINT (-94.40291355867443 50.70210267953273) | 37 | Canada +POINT (-47.3366032685003 -15.80931615144495) | 31 | Brazil +POINT (57.30444226807986 55.01281536452902) | 26 | Russia +null | 19 | null +POINT (140.68777053945644 -27.688147084349218) | 17 | Australia +POINT (-2.594152985359816 54.359511745107525) | 17 | United Kingdom +POINT (-64.19630772720735 -34.977900019058815) | 13 | Argentina +POINT (3.710366631858051 46.890841646818444) | 12 | France +POINT (107.94202494900674 -4.528175020823255) | 12 | Indonesia +POINT (10.15085451220247 
50.663009069605984) | 11 | Germany +POINT (11.015199956230141 43.04051815532148) | 11 | Italy +POINT (6.725663595240224 9.201645437966693) | 11 | Nigeria +POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan +; + +centroidFromAirportsFilteredCountry#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| WHERE country == "United States" +| STATS centroid=ST_CENTROID(city_location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-97.3333946136801 38.07953176370194) | 129 +; + +centroidFromAirportsCountGroupedCountryCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country +| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +; + +centroid:geo_point | count:long +POINT (17.55538044598613 18.185558743854063) | 891 +; + +centroidFromAirportsCountryCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() +; + +airports:geo_point | cities:geo_point | count:long +POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.127649406297987) | 891 +; + +centroidFromAirportsFilteredAndSorted#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| WHERE scalerank == 9 +| SORT abbrev +| WHERE length(name) > 12 +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT(78.73736493755132 26.761841227998957) | 12 +; + +centroidFromAirportsAfterMvExpand#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| MV_EXPAND type +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT(2.121611400672094 24.559172889205755) | 933 +; + +centroidFromAirportsGroupedAfterMvExpand#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| MV_EXPAND type +| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| SORT scalerank 
DESC +; + +centroid:geo_point | count:long | scalerank:i +POINT(83.16847535921261 28.79002037679311) | 40 | 9 +POINT(-9.579701727760353 29.651473146404623) | 266 | 8 +POINT(21.64429362312379 14.766539423726499) | 142 | 7 +POINT(-9.082370867592193 28.242454005495436) | 155 | 6 +POINT(9.394940837974781 28.953888530174837) | 46 | 5 +POINT(-1.6692755030477562 17.78088210212057) | 197 | 4 +POINT(-26.976065734634176 42.907839377294295) | 24 | 3 +POINT(1.2588642098541771 24.379140841774642) | 63 | 2 +; + +centroidFromAirportsGroupedAfterMvExpandFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| WHERE scalerank == 9 +| MV_EXPAND type +| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +; + +centroid:geo_point | count:long | scalerank:i +POINT(83.16847535921261 28.79002037679311) | 40 | 9 +; + +centroidFromAirportsAfterMvExpandFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| WHERE scalerank == 9 +| MV_EXPAND type +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT(83.16847535921261 28.79002037679311) | 40 ; geoPointEquals#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] @@ -107,9 +340,9 @@ wkt:keyword |pt:geo_point convertCartesianFromString#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] // tag::to_cartesianpoint-str[] -row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] -| mv_expand wkt -| eval pt = to_cartesianpoint(wkt) +ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) // end::to_cartesianpoint-str[] ; @@ -124,15 +357,53 @@ convertCartesianFromStringArray#[skip:-8.12.99, reason:spatial type cartesian_po row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianpoint(wkt); -wkt:keyword |pt:cartesian_point +wkt:keyword |pt:cartesian_point ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] 
|[POINT(4297.11 -1475.53), POINT(7580.93 2272.77)] ; +centroidCartesianFromStringNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +row wkt = "POINT(4297.10986328125 -1475.530029296875)" +| STATS c = ST_CENTROID(TO_CARTESIANPOINT(wkt)); + +c:cartesian_point +POINT(4297.10986328125 -1475.530029296875) +; + +centroidFromCartesianString1#[skip:-8.12.99, reason:st_centroid added in 8.13] +ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| STATS c = ST_CENTROID(pt); + +c:cartesian_point +POINT(4297.10986328125 -1475.530029296875) +; + +centroidFromCartesianString2#[skip:-8.12.99, reason:st_centroid added in 8.13] +ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| STATS c = ST_CENTROID(pt); + +c:cartesian_point +POINT(5939.02001953125 398.6199951171875) +; + +centroidFromCartesianString3#[skip:-8.12.99, reason:st_centroid added in 8.13] +ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| STATS c = ST_CENTROID(pt); + +c:cartesian_point +POINT(3949.163965353159 1078.2645465797348) +; + simpleCartesianLoad#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k -CJJ | POINT(14192780.461221408 4400430.851323913) | Cheongju Int'l | 9 | major +CJJ | POINT (14192780.461221408 4400430.851323913) | Cheongju Int'l | 9 | major HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid IDR | POINT (8439051.727244465 2599127.5424638605) | Devi Ahilyabai Holkar Int'l | 9 | mid OMS | POINT (8161539.810548711 7353650.845101996) | Omsk Tsentralny | 9 | mid @@ -142,6 
+413,80 @@ WIIT | POINT (11708145.489503577 -584415.9142832769) | Radin Inten II ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l | 9 | mid ; +cartesianCentroidFromAirports#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports_web +| STATS centroid=ST_CENTROID(location); + +centroid:cartesian_point +POINT(-266681.67563861894 3053301.5120195406) +; + +cartesianCentroidFromAirportsNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports_web +| STATS centroid=ST_CENTROID(TO_CARTESIANPOINT(location)); + +centroid:cartesian_point +POINT (-266681.66530554957 3053301.506061676) +; + +cartesianCentroidFromAirportsCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports_web +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT(-266681.67563861894 3053301.5120195406) | 849 +; + +cartesianCentroidFromAirportsCountGrouped#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports_web +| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| SORT scalerank DESC +; + +centroid:cartesian_point | count:long | scalerank:i +POINT(9289013.153846154 3615537.0533353365) | 26 | 9 +POINT(-1729861.3231565242 3781377.2572642546) | 228 | 8 +POINT(2001203.8796622984 1588548.3963268648) | 124 | 7 +POINT(-1417910.8585910928 3496034.7206865167) | 147 | 6 +POINT(1045839.9891304348 3582296.444293478) | 46 | 5 +POINT(-513618.93804196664 2279874.075660586) | 191 | 4 +POINT(-3002961.9270833335 5451641.91796875) | 24 | 3 +POINT(140136.12878224207 3081220.7881944445) | 63 | 2 +; + +cartesianCentroidFromAirportsFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports_web +| WHERE scalerank == 9 +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT(9289013.153846154 3615537.0533353365) | 26 +; + +cartesianCentroidFromAirportsFilteredAndSorted#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports_web 
+| WHERE scalerank == 9 +| SORT abbrev +| WHERE length(name) > 12 +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT(9003597.4375 3429344.0078125) | 8 +; + +cartesianCentroidFromAirportsCountGroupedCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports_web +| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +; + +centroid:cartesian_point | count:long +POINT (726480.0130685265 3359566.331716279) | 849 +; + cartesianPointEquals#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec new file mode 100644 index 0000000000000..f8ba7b9cb8cbe --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -0,0 +1,189 @@ +############################################### +# Tests for GEO_SHAPE type +# + +convertFromString#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +// tag::to_geoshape-str[] +ROW wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" +| EVAL geom = TO_GEOSHAPE(wkt) +// end::to_geoshape-str[] +; + +// tag::to_geoshape-str-result[] +wkt:keyword | geom:geo_shape +"POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" | POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10)) +// end::to_geoshape-str-result[] +; + +convertFromStringArray#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +row wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] +| eval pt = to_geoshape(wkt); + +wkt:keyword |pt:geo_shape +["POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))", "POINT(75.8092915005895 22.727749187571)"] |[POLYGON ((30 10\, 40 40\, 20 40\, 10 
20\, 30 10)), POINT(75.8092915005895 22.727749187571)] +; + +convertFromStringViaPoint#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +ROW wkt = "POINT (30 10)" +| EVAL point = TO_GEOPOINT(wkt) +| EVAL shape = TO_GEOSHAPE(point) +; + +wkt:keyword | point:geo_point | shape:geo_shape +"POINT (30 10)" | POINT (30 10) | POINT (30 10) +; + +# need to work out how to upload WKT +simpleLoad#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +FROM countries_bbox | WHERE id == "ISL"; + +id:keyword| name:keyword| shape:geo_shape +ISL|Iceland|BBOX(-24.538400, -13.499446, 66.536100, 63.390000) +; + +simpleLoadPointsAsShapes#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +FROM airports +| WHERE abbrev == "CPH" OR abbrev == "VLC" +| SORT abbrev +| EVAL location = TO_GEOSHAPE(location), city_location = TO_GEOSHAPE(city_location) +| KEEP abbrev, name, location, country, city, city_location +; + +abbrev:keyword | name:text | location:geo_shape | country:keyword | city:keyword | city_location:geo_shape +"CPH" | "Copenhagen" | POINT(12.6493508684508 55.6285017221528) | "Denmark" | "Copenhagen" | POINT(12.5683 55.6761) +"VLC" | "Valencia" | POINT(-0.473474930771676 39.4914597884489) | "Spain" | "Paterna" | POINT(-0.4406 39.5028) +; + +geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] + +ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] +| MV_EXPAND wkt +| EVAL pt = to_geoshape(wkt) +| WHERE pt == to_geoshape("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") +; + +wkt:keyword |pt:geo_shape +"POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" |POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10)) +; + +geo_shapeNotEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] +| MV_EXPAND wkt +| EVAL pt = to_geoshape(wkt) +| WHERE pt != 
to_geoshape("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") +; + +wkt:keyword |pt:geo_shape +"POINT(75.8092915005895 22.727749187571)" |POINT(75.8092915005895 22.727749187571) +; + +convertFromStringParseError#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] +| mv_expand wkt +| eval pt = to_geoshape(wkt) +; + +warning:Line 3:13: evaluation of [to_geoshape(wkt)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: Unknown geometry type: pointx +warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +wkt:keyword |pt:geo_shape +"POINTX(42.97109630194 14.7552534413725)" |null +"POINT(75.8092915005895 22.727749187571)" |POINT(75.8092915005895 22.727749187571) +"POINT(111)" |null +; + +############################################### +# Tests for CARTESIAN_SHAPE type +# + +convertCartesianShapeFromString#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +// tag::to_cartesianshape-str[] +ROW wkt = ["POINT(4297.11 -1475.53)", "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))"] +| MV_EXPAND wkt +| EVAL geom = TO_CARTESIANSHAPE(wkt) +// end::to_cartesianshape-str[] +; + +// tag::to_cartesianshape-str-result[] +wkt:keyword |geom:cartesian_shape +"POINT(4297.11 -1475.53)" |POINT(4297.11 -1475.53) +"POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))" |POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97)) +// end::to_cartesianshape-str-result[] +; + +convertCartesianFromStringArray#[skip:-8.12.99, reason:spatial type cartesian_shape only added in 8.13] +row wkt = 
["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] +| eval pt = to_cartesianshape(wkt); + +wkt:keyword |pt:cartesian_shape +["POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] |[POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97)), POINT(7580.93 2272.77)] +; + +convertCartesianFromStringViaPoint#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +ROW wkt = "POINT (3010 -1010)" +| EVAL point = TO_CARTESIANPOINT(wkt) +| EVAL shape = TO_CARTESIANSHAPE(point) +; + +wkt:keyword | point:cartesian_point | shape:cartesian_shape +"POINT (3010 -1010)" | POINT (3010 -1010) | POINT (3010 -1010) +; + +# need to work out how to upload WKT +simpleCartesianShapeLoad#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +FROM countries_bbox_web | WHERE id == "ISL"; + +id:keyword| name:keyword|shape:cartesian_shape +ISL|Iceland|BBOX(-2731602.192501422, -1502751.454502109, 1.0025136653899286E7, 9196525.03584683) +; + +simpleLoadCartesianPointsAsShapes#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +FROM airports_web +| WHERE abbrev == "CPH" OR abbrev == "VLC" +| SORT abbrev +| EVAL location = TO_CARTESIANSHAPE(location) +; + +abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cartesian_shape +"CPH" | "Copenhagen" | 3 | "major" | POINT(1408119.2975413958 7484813.53657096) +"VLC" | "Valencia" | 8 | "mid" | POINT(-52706.98819688343 4792315.469321795) +; + +cartesianshapeEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] +| 
MV_EXPAND wkt +| EVAL pt = to_cartesianshape(wkt) +| WHERE pt == to_cartesianshape("POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))") +; + +wkt:keyword |pt:cartesian_shape +"POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))" |POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97)) +; + +cartesianShapeNotEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] +| MV_EXPAND wkt +| EVAL pt = to_cartesianshape(wkt) +| WHERE pt != to_cartesianshape("POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))") +; + +wkt:keyword |pt:cartesian_shape +"POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) +; + +convertCartesianShapeFromStringParseError#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] +| mv_expand wkt +| eval pt = to_cartesianshape(wkt) +; + +warning:Line 3:13: evaluation of [to_cartesianshape(wkt)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: Unknown geometry type: pointx +warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +wkt:keyword |pt:cartesian_shape +"POINTX(4297.11 -1475.53)" |null +"POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) +"POINT(111)" |null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index ded080023f5c4..0dd2f4f937421 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -879,3 +879,107 @@ AVG(salary):double | avg_salary_rounded:double 48248.55 | 48249.0 // end::statsUnnamedColumnEval-result[] ; + +nestedExpressionNoGrouping#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| STATS s = SUM(emp_no + 3), c = COUNT(emp_no) +; + +s: long | c: long +1005350 | 100 +; + +nestedExpressionInSurrogateAgg#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| STATS a = AVG(emp_no % 5), s = SUM(emp_no % 5), c = COUNT(emp_no % 5) +; + +a:double | s:long | c:long +2.0 | 200 | 100 +; + +nestedExpressionInGroupingWithAlias#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| STATS s = SUM(emp_no % 5), c = COUNT(emp_no % 5) BY l = languages + 20 +| SORT l +; + +s:long | c:long | l : i +39 | 15 | 21 +36 | 19 | 22 +30 | 17 | 23 +32 | 18 | 24 +43 | 21 | 25 +20 | 10 | null +; + +nestedMultiExpressionInGroupingsAndAggs#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| EVAL sal = salary + 10000 +| STATS sum(sal), sum(salary + 10000) BY left(first_name, 1), concat(gender, to_string(languages)) +| SORT `left(first_name, 1)`, `concat(gender, to_string(languages))` +| LIMIT 5 +; + +sum(sal):l | sum(salary + 10000):l | left(first_name, 1):s | concat(gender, to_string(languages)):s +54307 | 54307 | A | F2 +70335 | 70335 | A | F3 +76817 
| 76817 | A | F5 +123675 | 123675 | A | M3 +43370 | 43370 | B | F2 +; + +nestedExpressionMultipleParams#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| STATS p = percentile(emp_no + 10, 50), m = median(emp_no + 10) BY languages +| SORT languages +; + +p:double | m:double | languages:integer +10053.0 | 10053.0 | 1 +10069.0 | 10069.0 | 2 +10068.0 | 10068.0 | 3 +10060.5 | 10060.5 | 4 +10076.0 | 10076.0 | 5 +10034.5 | 10034.5 | null +; + +groupByNull#[skip:-8.12.99,reason:bug fixed in 8.13+] +ROW a = 1, c = null +| STATS COUNT(a) BY c; + +COUNT(a):long | c:null + 1 | null +; + +groupByNullAndString#[skip:-8.12.99,reason:bug fixed in 8.13+] +ROW a = 1, b = "foo", c = null +| STATS COUNT(a) BY c, b; + +COUNT(a):long | c:null | b:keyword + 1 | null | foo +; + +groupByStringAndNull#[skip:-8.12.99,reason:bug fixed in 8.13+] +ROW a = 1, b = "foo", c = null +| STATS COUNT(a) BY b, c; + +COUNT(a):long | b:keyword | c:null + 1 | foo | null +; + +countNull#[skip:-8.12.99,reason:bug fixed in 8.13+] +ROW a = 1, c = null +| STATS COUNT(c) BY a; + +COUNT(c):long | a:integer + 0 | 1 +; + +countDistinctNull#[skip:-8.99.99,reason:not yet fixed] +ROW a = 1, c = null +| STATS COUNT_DISTINCT(c) BY a; + +COUNT(c):long | a:integer + 0 | 1 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec index 68f67b8a2743b..8f926fd8f6ed7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec @@ -87,7 +87,7 @@ COUNT_DISTINCT(ip0):long | COUNT_DISTINCT(ip1):long // end::count-distinct-result[] ; -countDistinctOfIpPrecision +countDistinctOfIpPrecision#[skip:-8.12.99,reason:expression spaces are maintained since 8.13] // tag::count-distinct-precision[] FROM hosts | STATS COUNT_DISTINCT(ip0, 80000), COUNT_DISTINCT(ip1, 5) @@ -95,7 
+95,7 @@ FROM hosts ; // tag::count-distinct-precision-result[] -COUNT_DISTINCT(ip0,80000):long | COUNT_DISTINCT(ip1,5):long +COUNT_DISTINCT(ip0, 80000):long | COUNT_DISTINCT(ip1, 5):long 7 | 9 // end::count-distinct-precision-result[] ; @@ -152,3 +152,16 @@ m:long | languages:i 20 | 5 10 | null ; + + +countDistinctWithGroupPrecisionAndNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] +from employees | stats m = count_distinct(height + 5, 9876) by languages | sort languages; + +m:long | languages:i +13 | 1 +16 | 2 +14 | 3 +15 | 4 +20 | 5 +10 | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec index f1849107d606d..091a625c7e10d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec @@ -77,7 +77,7 @@ m:double | p50:double 0 | 0 ; -medianOfInteger#[skip:-8.11.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765] +medianOfInteger#[skip:-8.12.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765/Expression spaces are maintained since 8.13] // tag::median[] FROM employees | STATS MEDIAN(salary), PERCENTILE(salary, 50) @@ -85,7 +85,7 @@ FROM employees ; // tag::median-result[] -MEDIAN(salary):double | PERCENTILE(salary,50):double +MEDIAN(salary):double | PERCENTILE(salary, 50):double 47003 | 47003 // end::median-result[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index b8b80df389f9c..bdbcfb3cb49e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -952,3 +952,56 @@ Bamford |true Bernatsky |false // end::endsWith-result[] ; + + + +toLowerRow#[skip:-8.12.99] +// tag::to_lower[] 
+ROW message = "Some Text" +| EVAL message_lower = TO_LOWER(message) +// end::to_lower[] +; + +// tag::to_lower-result[] +message:keyword | message_lower:keyword +Some Text | some text +// end::to_lower-result[] +; + + +toLower#[skip:-8.12.99] +from employees | sort emp_no | eval name_lower = TO_LOWER(first_name) | keep emp_no, first_name, name_lower | limit 1; + +emp_no:integer | first_name:keyword | name_lower:keyword +10001 | Georgi | georgi +; + + +toUpperRow#[skip:-8.12.99] +// tag::to_upper[] +ROW message = "Some Text" +| EVAL message_upper = TO_UPPER(message) +// end::to_upper[] +; + +// tag::to_upper-result[] +message:keyword | message_upper:keyword +Some Text | SOME TEXT +// end::to_upper-result[] +; + + +toUpper#[skip:-8.12.99] +from employees | sort emp_no | eval name_upper = TO_UPPER(first_name) | keep emp_no, first_name, name_upper | limit 1; + +emp_no:integer | first_name:keyword | name_upper:keyword +10001 | Georgi | GEORGI +; + + +toUpperLowerUnicode#[skip:-8.12.99] +row a = "π/2 + a + B + Λ ºC" | eval lower = to_lower(a), upper = to_upper(a) | keep a, upper, lower; + +a:keyword | upper:keyword | lower:keyword +π/2 + a + B + Λ ºC | Π/2 + A + B + Λ ºC | π/2 + a + b + λ ºc +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java index e3a01bd6f4dd9..26c11c3af0f45 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -7,15 +7,14 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.Client; import 
org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.compute.operator.exchange.ExchangeService; -import org.elasticsearch.core.TimeValue; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; @@ -29,38 +28,47 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; import org.elasticsearch.xpack.enrich.EnrichPlugin; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.After; +import org.junit.Before; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public class CrossClustersEnrichIT extends AbstractMultiClustersTestCase { - private static final String REMOTE_CLUSTER = "cluster_a"; @Override protected Collection remoteClusterAlias() { - return 
List.of(REMOTE_CLUSTER); + return List.of("c1", "c2"); + } + + protected Collection allClusters() { + return CollectionUtils.appendToCopy(remoteClusterAlias(), LOCAL_CLUSTER); } @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); plugins.add(EsqlPlugin.class); - plugins.add(InternalExchangePlugin.class); plugins.add(LocalStateEnrich.class); plugins.add(IngestCommonPlugin.class); plugins.add(ReindexPlugin.class); @@ -72,63 +80,179 @@ protected Settings nodeSettings() { return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); } - public static class InternalExchangePlugin extends Plugin { - @Override - public List> getSettings() { - return List.of( - Setting.timeSetting( - ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, - TimeValue.timeValueSeconds(30), - Setting.Property.NodeScope - ) - ); + @Before + public void setupHostsEnrich() { + // the hosts policy are identical on every node + Map allHosts = Map.of( + "192.168.1.2", + "Windows", + "192.168.1.3", + "MacOS", + "192.168.1.4", + "Linux", + "192.168.1.5", + "Android", + "192.168.1.6", + "iOS", + "192.168.1.7", + "Windows", + "192.168.1.8", + "MacOS", + "192.168.1.9", + "Linux", + "192.168.1.10", + "Linux", + "192.168.1.11", + "Windows" + ); + for (String cluster : allClusters()) { + Client client = client(cluster); + client.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); + for (Map.Entry h : allHosts.entrySet()) { + client.prepareIndex("hosts").setSource("ip", h.getKey(), "os", h.getValue()).get(); + } + client.admin().indices().prepareRefresh("hosts").get(); + EnrichPolicy policy = new EnrichPolicy("match", null, List.of("hosts"), "ip", List.of("ip", "os")); + client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request("hosts", policy)).actionGet(); + client.execute(ExecuteEnrichPolicyAction.INSTANCE, new 
ExecuteEnrichPolicyAction.Request("hosts")).actionGet(); + assertAcked(client.admin().indices().prepareDelete("hosts")); } } - public void testUnsupportedEnrich() { - Client localClient = client(LOCAL_CLUSTER); - localClient.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); - record Host(String ip, String os) { + @Before + public void setupEventsIndices() { + record Event(long timestamp, String user, String host) { } - var hosts = List.of(new Host("192.168.1.3", "Windows")); - for (var h : hosts) { - localClient.prepareIndex("hosts").setSource("ip", h.ip, "os", h.os).get(); + List e0 = List.of( + new Event(1, "matthew", "192.168.1.3"), + new Event(2, "simon", "192.168.1.5"), + new Event(3, "park", "192.168.1.2"), + new Event(4, "andrew", "192.168.1.7"), + new Event(5, "simon", "192.168.1.20"), + new Event(6, "kevin", "192.168.1.2"), + new Event(7, "akio", "192.168.1.5"), + new Event(8, "luke", "192.168.1.2"), + new Event(9, "jack", "192.168.1.4") + ); + List e1 = List.of( + new Event(1, "andres", "192.168.1.2"), + new Event(2, "sergio", "192.168.1.6"), + new Event(3, "kylian", "192.168.1.8"), + new Event(4, "andrew", "192.168.1.9"), + new Event(5, "jack", "192.168.1.3"), + new Event(6, "kevin", "192.168.1.4"), + new Event(7, "akio", "192.168.1.7"), + new Event(8, "kevin", "192.168.1.21"), + new Event(9, "andres", "192.168.1.8") + ); + List e2 = List.of( + new Event(1, "park", "192.168.1.25"), + new Event(2, "akio", "192.168.1.5"), + new Event(3, "park", "192.168.1.2"), + new Event(4, "kevin", "192.168.1.3") + ); + for (var c : Map.of(LOCAL_CLUSTER, e0, "c1", e1, "c2", e2).entrySet()) { + Client client = client(c.getKey()); + client.admin() + .indices() + .prepareCreate("events") + .setMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip") + .get(); + for (var e : c.getValue()) { + client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host).get(); + } + 
client.admin().indices().prepareRefresh("events").get(); } - localClient.admin().indices().prepareRefresh("hosts").get(); - EnrichPolicy policy = new EnrichPolicy("match", null, List.of("hosts"), "ip", List.of("ip", "os")); - localClient.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request("hosts", policy)).actionGet(); - localClient.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request("hosts")).actionGet(); - assertAcked(client(LOCAL_CLUSTER).admin().indices().prepareDelete("hosts")); + } - record Event(String ip, String message) { + @After + public void wipeEnrichPolicies() { + for (String cluster : allClusters()) { + cluster(cluster).wipe(Set.of()); + for (String policy : List.of("hosts")) { + client(cluster).execute(DeleteEnrichPolicyAction.INSTANCE, new DeleteEnrichPolicyAction.Request(policy)); + } + } + } + static String enrichCommand(String policy, Enrich.Mode mode) { + if (mode == Enrich.Mode.ANY && randomBoolean()) { + return "ENRICH " + policy; } - for (String cluster : List.of(LOCAL_CLUSTER, REMOTE_CLUSTER)) { - var events = List.of(new Event("192.168.1.4", "access denied"), new Event("192.168.1.3", "restart")); - assertAcked(client(cluster).admin().indices().prepareCreate("events").setMapping("ip", "type=ip", "message", "type=text")); - for (Event e : events) { - client(cluster).prepareIndex("events").setSource("ip", e.ip, "message", e.message).get(); + return "ENRICH[ccq.mode: " + mode + "] " + policy + " "; + } + + public void testWithHostsPolicy() { + for (Enrich.Mode mode : List.of(Enrich.Mode.ANY)) { + String enrich = enrichCommand("hosts", mode); + String query = "FROM events | eval ip= TO_STR(host) | " + enrich + " | stats c = COUNT(*) by os | SORT os"; + try (EsqlQueryResponse resp = runQuery(query)) { + List> rows = getValuesList(resp); + assertThat( + rows, + equalTo( + List.of( + List.of(2L, "Android"), + List.of(1L, "Linux"), + List.of(1L, "MacOS"), + List.of(4L, "Windows"), + 
Arrays.asList(1L, (String) null) + ) + ) + ); } - client(cluster).admin().indices().prepareRefresh("events").get(); } - List queries = List.of( - "FROM *:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | LIMIT 1", - "FROM events*,*:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | LIMIT 1", - "FROM *:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | STATS COUNT(*) BY ip | LIMIT 1", - "FROM events*,*:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | STATS COUNT(*) BY ip | LIMIT 1" - ); - for (String q : queries) { - Exception error = expectThrows(IllegalArgumentException.class, () -> runQuery(q).close()); - assertThat(error.getMessage(), containsString("cross clusters query doesn't support enrich yet")); + for (Enrich.Mode mode : List.of(Enrich.Mode.ANY)) { + String enrich = enrichCommand("hosts", mode); + String query = "FROM *:events | eval ip= TO_STR(host) | " + enrich + " | stats c = COUNT(*) by os | SORT os"; + try (EsqlQueryResponse resp = runQuery(query)) { + List> rows = getValuesList(resp); + assertThat( + rows, + equalTo( + List.of( + List.of(1L, "Android"), + List.of(2L, "Linux"), + List.of(4L, "MacOS"), + List.of(3L, "Windows"), + List.of(1L, "iOS"), + Arrays.asList(2L, (String) null) + ) + ) + ); + } + } + + for (Enrich.Mode mode : List.of(Enrich.Mode.ANY)) { + String enrich = enrichCommand("hosts", mode); + String query = "FROM *:events,events | eval ip= TO_STR(host) | " + enrich + " | stats c = COUNT(*) by os | SORT os"; + try (EsqlQueryResponse resp = runQuery(query)) { + List> rows = getValuesList(resp); + assertThat( + rows, + equalTo( + List.of( + List.of(3L, "Android"), + List.of(3L, "Linux"), + List.of(5L, "MacOS"), + List.of(7L, "Windows"), + List.of(1L, "iOS"), + Arrays.asList(3L, (String) null) + ) + ) + ); + } } } - @After - public void cleanClusters() { - cluster(LOCAL_CLUSTER).wipe(Set.of()); - client(LOCAL_CLUSTER).execute(DeleteEnrichPolicyAction.INSTANCE, new 
DeleteEnrichPolicyAction.Request("hosts")); - cluster(REMOTE_CLUSTER).wipe(Set.of()); + public void testUnsupportedEnrichMode() { + for (Enrich.Mode mode : List.of(Enrich.Mode.REMOTE, Enrich.Mode.COORDINATOR)) { + String enrich = enrichCommand("hosts", mode); + String q = "FROM *:events | eval ip= TO_STR(host) | " + enrich + " | stats c = COUNT(*) by os | SORT os"; + Exception error = expectThrows(IllegalArgumentException.class, () -> runQuery(q).close()); + assertThat(error.getMessage(), containsString("Enrich modes COORDINATOR and REMOTE are not supported yet")); + } } protected EsqlQueryResponse runQuery(String query) { @@ -163,7 +287,7 @@ public EnrichTransportXPackInfoAction( } @Override - protected List infoActions() { + protected List> infoActions() { return Collections.singletonList(XPackInfoFeatureAction.ENRICH); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java index daefa8899b443..1d2bff3cf360c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.Client; @@ -32,6 +33,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; import 
org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; @@ -339,7 +341,7 @@ public EnrichTransportXPackInfoAction( } @Override - protected List infoActions() { + protected List> infoActions() { return Collections.singletonList(XPackInfoFeatureAction.ENRICH); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 80da888eb4dfb..04e46d8ff5425 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -39,6 +39,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -1049,7 +1050,8 @@ public void testShowFunctions() { new ColumnInfo("returnType", "keyword"), new ColumnInfo("description", "keyword"), new ColumnInfo("optionalArgs", "boolean"), - new ColumnInfo("variadic", "boolean") + new ColumnInfo("variadic", "boolean"), + new ColumnInfo("isAggregation", "boolean") ) ) ); @@ -1313,7 +1315,7 @@ public void testStatsNestFields() { } } - public void testStatsMissingFields() { + public void testStatsMissingFieldWithStats() { final String node1, node2; if (randomBoolean()) { internalCluster().ensureAtLeastNumDataNodes(2); @@ -1352,6 +1354,39 @@ public void testStatsMissingFields() { } } + public void testStatsMissingFieldKeepApp() { + final String node1, node2; + if (randomBoolean()) { + internalCluster().ensureAtLeastNumDataNodes(2); + node1 = randomDataNode().getName(); + node2 = randomValueOtherThan(node1, () -> randomDataNode().getName()); + } else { + node1 = randomDataNode().getName(); + node2 = randomDataNode().getName(); + } + assertAcked( + client().admin() + .indices() + 
.prepareCreate("foo-index") + .setSettings(Settings.builder().put("index.routing.allocation.require._name", node1)) + .setMapping("foo_int", "type=integer", "foo_long", "type=long", "foo_float", "type=float", "foo_double", "type=double") + ); + assertAcked( + client().admin() + .indices() + .prepareCreate("bar-index") + .setSettings(Settings.builder().put("index.routing.allocation.require._name", node2)) + .setMapping("bar_int", "type=integer", "bar_long", "type=long", "bar_float", "type=float", "bar_double", "type=double") + ); + String command = String.format(Locale.ROOT, "from foo-index,bar-index"); + try (var resp = run(command)) { + var valuesList = getValuesList(resp); + assertEquals(8, resp.columns().size()); + assertEquals(0, valuesList.size()); + assertEquals(Collections.emptyList(), valuesList); + } + } + public void testCountTextField() { assertAcked(client().admin().indices().prepareCreate("test_count").setMapping("name", "type=text")); int numDocs = between(10, 1000); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index f13321f03f0fe..689672075fb03 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -40,7 +40,6 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsEqual.equalTo; -import static org.hamcrest.core.IsNot.not; /** * Runs test scenarios from EsqlActionIT, with an extra level of indirection @@ -74,8 +73,6 @@ protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, Query String id = response.asyncExecutionId().get(); if (response.isRunning() == false) { assertThat(request.keepOnCompletion(), is(true)); 
- assertThat(response.columns(), is(not(empty()))); - assertThat(response.pages(), is(not(empty()))); initialColumns = List.copyOf(response.columns()); initialPages = deepCopyOf(response.pages(), TestBlockFactory.getNonBreakingInstance()); } else { diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index dbaefa2e5aebf..81f12997248c8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -130,6 +130,7 @@ RP : ')'; TRUE : 'true'; EQ : '=='; +CIEQ : '=~'; NEQ : '!='; LT : '<'; LTE : '<='; @@ -177,8 +178,8 @@ EXPR_WS // mode FROM_MODE; FROM_PIPE : PIPE -> type(PIPE), popMode; -FROM_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET), pushMode(FROM_MODE), pushMode(FROM_MODE); -FROM_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode, popMode; +FROM_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET); +FROM_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET); FROM_COMMA : COMMA -> type(COMMA); FROM_ASSIGN : ASSIGN -> type(ASSIGN); @@ -220,11 +221,15 @@ fragment UNQUOTED_ID_BODY_WITH_PATTERN : (LETTER | DIGIT | UNDERSCORE | ASTERISK) ; -PROJECT_UNQUOTED_IDENTIFIER +UNQUOTED_ID_PATTERN : (LETTER | ASTERISK) UNQUOTED_ID_BODY_WITH_PATTERN* | (UNDERSCORE | ASPERAND) UNQUOTED_ID_BODY_WITH_PATTERN+ ; +PROJECT_UNQUOTED_IDENTIFIER + : UNQUOTED_ID_PATTERN -> type(UNQUOTED_ID_PATTERN) + ; + PROJECT_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; @@ -257,7 +262,7 @@ RENAME_QUOTED_IDENTIFIER // use the unquoted pattern to let the parser invalidate fields with * RENAME_UNQUOTED_IDENTIFIER - : PROJECT_UNQUOTED_IDENTIFIER -> type(PROJECT_UNQUOTED_IDENTIFIER) + : UNQUOTED_ID_PATTERN -> type(UNQUOTED_ID_PATTERN) ; RENAME_LINE_COMMENT @@ -275,19 +280,29 @@ RENAME_WS // | ENRICH ON key WITH fields mode ENRICH_MODE; ENRICH_PIPE : PIPE -> type(PIPE), popMode; +ENRICH_OPENING_BRACKET : OPENING_BRACKET -> 
type(OPENING_BRACKET), pushMode(SETTING_MODE); ON : 'on' -> pushMode(ENRICH_FIELD_MODE); WITH : 'with' -> pushMode(ENRICH_FIELD_MODE); -// use the unquoted pattern to let the parser invalidate fields with * -ENRICH_POLICY_UNQUOTED_IDENTIFIER - : FROM_UNQUOTED_IDENTIFIER -> type(FROM_UNQUOTED_IDENTIFIER) +// similar to that of an index +// see https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html#indices-create-api-path-params +fragment ENRICH_POLICY_NAME_BODY + : ~[\\/?"<>| ,#\t\r\n:] + ; + +ENRICH_POLICY_NAME + : (LETTER | DIGIT) ENRICH_POLICY_NAME_BODY* ; ENRICH_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; +ENRICH_MODE_UNQUOTED_VALUE + : ENRICH_POLICY_NAME -> type(ENRICH_POLICY_NAME) + ; + ENRICH_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) ; @@ -310,7 +325,7 @@ ENRICH_FIELD_DOT: DOT -> type(DOT); ENRICH_FIELD_WITH : WITH -> type(WITH) ; ENRICH_FIELD_UNQUOTED_IDENTIFIER - : PROJECT_UNQUOTED_IDENTIFIER -> type(PROJECT_UNQUOTED_IDENTIFIER) + : UNQUOTED_ID_PATTERN -> type(UNQUOTED_ID_PATTERN) ; ENRICH_FIELD_QUOTED_IDENTIFIER @@ -373,3 +388,25 @@ SHOW_MULTILINE_COMMENT SHOW_WS : WS -> channel(HIDDEN) ; + +mode SETTING_MODE; +SETTING_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode; + +COLON : ':'; + +SETTING + : (ASPERAND | DIGIT| DOT | LETTER | UNDERSCORE)+ + ; + +SETTING_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +SETTTING_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +SETTING_WS + : WS -> channel(HIDDEN) + ; + diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens new file mode 100644 index 0000000000000..be2d95ba9531f --- /dev/null +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -0,0 +1,166 @@ +DISSECT=1 +DROP=2 +ENRICH=3 +EVAL=4 +EXPLAIN=5 +FROM=6 +GROK=7 +INLINESTATS=8 +KEEP=9 +LIMIT=10 +MV_EXPAND=11 +PROJECT=12 +RENAME=13 +ROW=14 +SHOW=15 +SORT=16 +STATS=17 +WHERE=18 
+UNKNOWN_CMD=19 +LINE_COMMENT=20 +MULTILINE_COMMENT=21 +WS=22 +EXPLAIN_WS=23 +EXPLAIN_LINE_COMMENT=24 +EXPLAIN_MULTILINE_COMMENT=25 +PIPE=26 +STRING=27 +INTEGER_LITERAL=28 +DECIMAL_LITERAL=29 +BY=30 +AND=31 +ASC=32 +ASSIGN=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +LAST=39 +LP=40 +IN=41 +IS=42 +LIKE=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +OPENING_BRACKET=64 +CLOSING_BRACKET=65 +UNQUOTED_IDENTIFIER=66 +QUOTED_IDENTIFIER=67 +EXPR_LINE_COMMENT=68 +EXPR_MULTILINE_COMMENT=69 +EXPR_WS=70 +METADATA=71 +FROM_UNQUOTED_IDENTIFIER=72 +FROM_LINE_COMMENT=73 +FROM_MULTILINE_COMMENT=74 +FROM_WS=75 +UNQUOTED_ID_PATTERN=76 +PROJECT_LINE_COMMENT=77 +PROJECT_MULTILINE_COMMENT=78 +PROJECT_WS=79 +AS=80 +RENAME_LINE_COMMENT=81 +RENAME_MULTILINE_COMMENT=82 +RENAME_WS=83 +ON=84 +WITH=85 +ENRICH_POLICY_NAME=86 +ENRICH_LINE_COMMENT=87 +ENRICH_MULTILINE_COMMENT=88 +ENRICH_WS=89 +ENRICH_FIELD_LINE_COMMENT=90 +ENRICH_FIELD_MULTILINE_COMMENT=91 +ENRICH_FIELD_WS=92 +MVEXPAND_LINE_COMMENT=93 +MVEXPAND_MULTILINE_COMMENT=94 +MVEXPAND_WS=95 +INFO=96 +FUNCTIONS=97 +SHOW_LINE_COMMENT=98 +SHOW_MULTILINE_COMMENT=99 +SHOW_WS=100 +COLON=101 +SETTING=102 +SETTING_LINE_COMMENT=103 +SETTTING_MULTILINE_COMMENT=104 +SETTING_WS=105 +'dissect'=1 +'drop'=2 +'enrich'=3 +'eval'=4 +'explain'=5 +'from'=6 +'grok'=7 +'inlinestats'=8 +'keep'=9 +'limit'=10 +'mv_expand'=11 +'project'=12 +'rename'=13 +'row'=14 +'show'=15 +'sort'=16 +'stats'=17 +'where'=18 +'|'=26 +'by'=30 +'and'=31 +'asc'=32 +'='=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'last'=39 +'('=40 +'in'=41 +'is'=42 +'like'=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=65 +'metadata'=71 +'as'=80 +'on'=84 +'with'=85 +'info'=96 +'functions'=97 +':'=101 diff --git 
a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index cdf0cea58b230..a9539bff1b765 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -111,15 +111,11 @@ evalCommand ; statsCommand - : STATS fields? (BY grouping)? + : STATS stats=fields? (BY grouping=fields)? ; inlinestatsCommand - : INLINESTATS fields (BY grouping)? - ; - -grouping - : qualifiedName (COMMA qualifiedName)* + : INLINESTATS stats=fields (BY grouping=fields)? ; fromIdentifier @@ -141,7 +137,7 @@ identifier ; identifierPattern - : PROJECT_UNQUOTED_IDENTIFIER + : UNQUOTED_ID_PATTERN | QUOTED_IDENTIFIER ; @@ -229,7 +225,7 @@ string ; comparisonOperator - : EQ | NEQ | LT | LTE | GT | GTE + : EQ | CIEQ | NEQ | LT | LTE | GT | GTE ; explainCommand @@ -246,9 +242,13 @@ showCommand ; enrichCommand - : ENRICH policyName=fromIdentifier (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? + : ENRICH setting* policyName=ENRICH_POLICY_NAME (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? ; enrichWithClause : (newName=qualifiedNamePattern ASSIGN)? 
enrichField=qualifiedNamePattern ; + +setting + : OPENING_BRACKET name=SETTING COLON value=SETTING CLOSING_BRACKET + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens new file mode 100644 index 0000000000000..be2d95ba9531f --- /dev/null +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -0,0 +1,166 @@ +DISSECT=1 +DROP=2 +ENRICH=3 +EVAL=4 +EXPLAIN=5 +FROM=6 +GROK=7 +INLINESTATS=8 +KEEP=9 +LIMIT=10 +MV_EXPAND=11 +PROJECT=12 +RENAME=13 +ROW=14 +SHOW=15 +SORT=16 +STATS=17 +WHERE=18 +UNKNOWN_CMD=19 +LINE_COMMENT=20 +MULTILINE_COMMENT=21 +WS=22 +EXPLAIN_WS=23 +EXPLAIN_LINE_COMMENT=24 +EXPLAIN_MULTILINE_COMMENT=25 +PIPE=26 +STRING=27 +INTEGER_LITERAL=28 +DECIMAL_LITERAL=29 +BY=30 +AND=31 +ASC=32 +ASSIGN=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +LAST=39 +LP=40 +IN=41 +IS=42 +LIKE=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +OPENING_BRACKET=64 +CLOSING_BRACKET=65 +UNQUOTED_IDENTIFIER=66 +QUOTED_IDENTIFIER=67 +EXPR_LINE_COMMENT=68 +EXPR_MULTILINE_COMMENT=69 +EXPR_WS=70 +METADATA=71 +FROM_UNQUOTED_IDENTIFIER=72 +FROM_LINE_COMMENT=73 +FROM_MULTILINE_COMMENT=74 +FROM_WS=75 +UNQUOTED_ID_PATTERN=76 +PROJECT_LINE_COMMENT=77 +PROJECT_MULTILINE_COMMENT=78 +PROJECT_WS=79 +AS=80 +RENAME_LINE_COMMENT=81 +RENAME_MULTILINE_COMMENT=82 +RENAME_WS=83 +ON=84 +WITH=85 +ENRICH_POLICY_NAME=86 +ENRICH_LINE_COMMENT=87 +ENRICH_MULTILINE_COMMENT=88 +ENRICH_WS=89 +ENRICH_FIELD_LINE_COMMENT=90 +ENRICH_FIELD_MULTILINE_COMMENT=91 +ENRICH_FIELD_WS=92 +MVEXPAND_LINE_COMMENT=93 +MVEXPAND_MULTILINE_COMMENT=94 +MVEXPAND_WS=95 +INFO=96 +FUNCTIONS=97 +SHOW_LINE_COMMENT=98 +SHOW_MULTILINE_COMMENT=99 +SHOW_WS=100 +COLON=101 +SETTING=102 +SETTING_LINE_COMMENT=103 +SETTTING_MULTILINE_COMMENT=104 +SETTING_WS=105 +'dissect'=1 +'drop'=2 +'enrich'=3 +'eval'=4 +'explain'=5 +'from'=6 
+'grok'=7 +'inlinestats'=8 +'keep'=9 +'limit'=10 +'mv_expand'=11 +'project'=12 +'rename'=13 +'row'=14 +'show'=15 +'sort'=16 +'stats'=17 +'where'=18 +'|'=26 +'by'=30 +'and'=31 +'asc'=32 +'='=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'last'=39 +'('=40 +'in'=41 +'is'=42 +'like'=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=65 +'metadata'=71 +'as'=80 +'on'=84 +'with'=85 +'info'=96 +'functions'=97 +':'=101 diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java new file mode 100644 index 0000000000000..9417a2374ac12 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java @@ -0,0 +1,121 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link InsensitiveEquals}. + * This class is generated. Do not edit it. 
+ */ +public final class InsensitiveEqualsConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator lhs; + + private final ByteRunAutomaton rhs; + + private final DriverContext driverContext; + + public InsensitiveEqualsConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, + ByteRunAutomaton rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.lhs = lhs; + this.rhs = rhs; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock lhsBlock = (BytesRefBlock) lhs.eval(page)) { + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return eval(page.getPositionCount(), lhsBlock); + } + return eval(page.getPositionCount(), lhsVector).asBlock(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef lhsScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(InsensitiveEquals.processConstant(lhsBlock.getBytesRef(lhsBlock.getFirstValueIndex(p), lhsScratch), rhs)); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, BytesRefVector lhsVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef lhsScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), 
rhs)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "InsensitiveEqualsConstantEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(lhs); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final ByteRunAutomaton rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + ByteRunAutomaton rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public InsensitiveEqualsConstantEvaluator get(DriverContext context) { + return new InsensitiveEqualsConstantEvaluator(source, lhs.get(context), rhs, context); + } + + @Override + public String toString() { + return "InsensitiveEqualsConstantEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsEvaluator.java new file mode 100644 index 0000000000000..8832a77928aa3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsEvaluator.java @@ -0,0 +1,139 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link InsensitiveEquals}. + * This class is generated. Do not edit it. + */ +public final class InsensitiveEqualsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator lhs; + + private final EvalOperator.ExpressionEvaluator rhs; + + private final DriverContext driverContext; + + public InsensitiveEqualsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.lhs = lhs; + this.rhs = rhs; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock lhsBlock = (BytesRefBlock) lhs.eval(page)) { + try (BytesRefBlock rhsBlock = (BytesRefBlock) rhs.eval(page)) { + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return eval(page.getPositionCount(), lhsBlock, rhsBlock); + } + BytesRefVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return eval(page.getPositionCount(), lhsBlock, rhsBlock); + } + return 
eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef lhsScratch = new BytesRef(); + BytesRef rhsScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(InsensitiveEquals.process(lhsBlock.getBytesRef(lhsBlock.getFirstValueIndex(p), lhsScratch), rhsBlock.getBytesRef(rhsBlock.getFirstValueIndex(p), rhsScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef lhsScratch = new BytesRef(); + BytesRef rhsScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(InsensitiveEquals.process(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "InsensitiveEqualsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(lhs, rhs); + } + + static 
class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public InsensitiveEqualsEvaluator get(DriverContext context) { + return new InsensitiveEqualsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "InsensitiveEqualsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java new file mode 100644 index 0000000000000..5ec9dcb94f67f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java @@ -0,0 +1,125 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToCartesianShape}. + * This class is generated. Do not edit it. + */ +public final class ToCartesianShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToCartesianShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToCartesianShapeFromString"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendBytesRef(evalValue(vector, p, scratchPad)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefVector container, int 
index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToCartesianShape.fromKeyword(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + BytesRef value = evalValue(block, i, scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToCartesianShape.fromKeyword(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToCartesianShapeFromStringEvaluator get(DriverContext context) { + return new ToCartesianShapeFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToCartesianShapeFromStringEvaluator[field=" + field + "]"; + } + } +} diff 
--git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java new file mode 100644 index 0000000000000..68a6087d86953 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java @@ -0,0 +1,125 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToGeoShape}. + * This class is generated. Do not edit it. 
+ */ +public final class ToGeoShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToGeoShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToGeoShapeFromString"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendBytesRef(evalValue(vector, p, scratchPad)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToGeoShape.fromKeyword(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + BytesRef value = evalValue(block, i, 
scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToGeoShape.fromKeyword(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToGeoShapeFromStringEvaluator get(DriverContext context) { + return new ToGeoShapeFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToGeoShapeFromStringEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromCartesianPointEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java similarity index 68% rename from x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromCartesianPointEvaluator.java rename to x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java index 5f424bc4e568b..5e466ddfbfddc 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromCartesianPointEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java @@ -10,25 +10,24 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; /** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}. + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. * This class is generated. Do not edit it. */ -public final class ToLongFromCartesianPointEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromCartesianPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source, +public final class ToStringFromCartesianShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToStringFromCartesianShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, DriverContext driverContext) { super(driverContext, field, source); } @Override public String name() { - return "ToLongFromCartesianPoint"; + return "ToStringFromCartesianShape"; } @Override @@ -37,26 +36,26 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { - return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } - try (LongBlock.Builder builder = 
driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - builder.appendLong(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } return builder.build(); } } - private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { BytesRef value = container.getBytesRef(index, scratchPad); - return ToLong.fromCartesianPoint(value); + return ToString.fromCartesianShape(value); } @Override public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); @@ -65,12 +64,12 @@ public Block evalBlock(Block b) { boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { - long value = evalValue(block, i, scratchPad); + BytesRef value = evalValue(block, i, scratchPad); if (positionOpened == false && valueCount > 1) { builder.beginPositionEntry(); positionOpened = true; } - builder.appendLong(value); + builder.appendBytesRef(value); valuesAppended = true; } if (valuesAppended == false) { @@ -83,9 +82,9 @@ public Block evalBlock(Block b) { } } - private static long evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { BytesRef value = container.getBytesRef(index, scratchPad); - return ToLong.fromCartesianPoint(value); + return 
ToString.fromCartesianShape(value); } public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { @@ -99,13 +98,13 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { } @Override - public ToLongFromCartesianPointEvaluator get(DriverContext context) { - return new ToLongFromCartesianPointEvaluator(field.get(context), source, context); + public ToStringFromCartesianShapeEvaluator get(DriverContext context) { + return new ToStringFromCartesianShapeEvaluator(field.get(context), source, context); } @Override public String toString() { - return "ToLongFromCartesianPointEvaluator[field=" + field + "]"; + return "ToStringFromCartesianShapeEvaluator[field=" + field + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromGeoPointEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java similarity index 68% rename from x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromGeoPointEvaluator.java rename to x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java index e85f2191023fe..df8e86e58fa69 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromGeoPointEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java @@ -10,25 +10,24 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; /** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}. + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. * This class is generated. Do not edit it. */ -public final class ToLongFromGeoPointEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromGeoPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source, +public final class ToStringFromGeoShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToStringFromGeoShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, DriverContext driverContext) { super(driverContext, field, source); } @Override public String name() { - return "ToLongFromGeoPoint"; + return "ToStringFromGeoShape"; } @Override @@ -37,26 +36,26 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { - return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } - try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - builder.appendLong(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } return builder.build(); } } - private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { BytesRef value = container.getBytesRef(index, scratchPad); - return 
ToLong.fromGeoPoint(value); + return ToString.fromGeoShape(value); } @Override public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); @@ -65,12 +64,12 @@ public Block evalBlock(Block b) { boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { - long value = evalValue(block, i, scratchPad); + BytesRef value = evalValue(block, i, scratchPad); if (positionOpened == false && valueCount > 1) { builder.beginPositionEntry(); positionOpened = true; } - builder.appendLong(value); + builder.appendBytesRef(value); valuesAppended = true; } if (valuesAppended == false) { @@ -83,9 +82,9 @@ public Block evalBlock(Block b) { } } - private static long evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { BytesRef value = container.getBytesRef(index, scratchPad); - return ToLong.fromGeoPoint(value); + return ToString.fromGeoShape(value); } public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { @@ -99,13 +98,13 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { } @Override - public ToLongFromGeoPointEvaluator get(DriverContext context) { - return new ToLongFromGeoPointEvaluator(field.get(context), source, context); + public ToStringFromGeoShapeEvaluator get(DriverContext context) { + return new ToStringFromGeoShapeEvaluator(field.get(context), source, context); } @Override public String toString() { - return "ToLongFromGeoPointEvaluator[field=" + field + "]"; + return 
"ToStringFromGeoShapeEvaluator[field=" + field + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java new file mode 100644 index 0000000000000..23f28385916c7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java @@ -0,0 +1,118 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.util.Locale; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLower}. + * This class is generated. Do not edit it. 
+ */ +public final class ToLowerEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final Locale locale; + + private final DriverContext driverContext; + + public ToLowerEvaluator(Source source, EvalOperator.ExpressionEvaluator val, Locale locale, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.locale = locale; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock valBlock = (BytesRefBlock) val.eval(page)) { + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(ToLower.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), locale)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, BytesRefVector valVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(ToLower.process(valVector.getBytesRef(p, valScratch), locale)); + } + return result.build(); + } + } + + @Override + 
public String toString() { + return "ToLowerEvaluator[" + "val=" + val + ", locale=" + locale + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final Locale locale; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, Locale locale) { + this.source = source; + this.val = val; + this.locale = locale; + } + + @Override + public ToLowerEvaluator get(DriverContext context) { + return new ToLowerEvaluator(source, val.get(context), locale, context); + } + + @Override + public String toString() { + return "ToLowerEvaluator[" + "val=" + val + ", locale=" + locale + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java new file mode 100644 index 0000000000000..5c3e86184d460 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java @@ -0,0 +1,118 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.util.Locale; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUpper}. + * This class is generated. Do not edit it. + */ +public final class ToUpperEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final Locale locale; + + private final DriverContext driverContext; + + public ToUpperEvaluator(Source source, EvalOperator.ExpressionEvaluator val, Locale locale, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.locale = locale; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock valBlock = (BytesRefBlock) val.eval(page)) { + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); 
+ continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(ToUpper.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), locale)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, BytesRefVector valVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(ToUpper.process(valVector.getBytesRef(p, valScratch), locale)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "ToUpperEvaluator[" + "val=" + val + ", locale=" + locale + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final Locale locale; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, Locale locale) { + this.source = source; + this.val = val; + this.locale = locale; + } + + @Override + public ToUpperEvaluator get(DriverContext context) { + return new ToUpperEvaluator(source, val.get(context), locale, context); + } + + @Override + public String toString() { + return "ToUpperEvaluator[" + "val=" + val + ", locale=" + locale + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java index 673ec0bc4a184..79ce1754f7163 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java 
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java @@ -162,18 +162,18 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(UTC_DATE_TIME_FORMATTER.formatMillis(longVal)); } }; - case "geo_point" -> new PositionToXContent(block) { + case "geo_point", "geo_shape" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { - return builder.value(GEO.wkbAsString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + return builder.value(GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; - case "cartesian_point" -> new PositionToXContent(block) { + case "cartesian_point", "cartesian_shape" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { - return builder.value(CARTESIAN.wkbAsString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + return builder.value(CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; case "boolean" -> new PositionToXContent(block) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java index f6593dccb9c49..ee153c5b29b2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java @@ -17,6 +17,6 @@ public class EsqlAsyncGetResultAction extends ActionType { public static final String NAME = EsqlAsyncActionNames.ESQL_ASYNC_GET_RESULT_ACTION_NAME; private EsqlAsyncGetResultAction() { - super(NAME, in -> { throw new 
IllegalArgumentException("can't transport EsqlAsyncGetResultAction"); }); + super(NAME); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java index 13b5b067f5cc9..901a01dcf857e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java @@ -15,6 +15,6 @@ public class EsqlQueryAction extends ActionType { public static final String NAME = "indices:data/read/esql"; private EsqlQueryAction() { - super(NAME, in -> { throw new IllegalArgumentException("can't transport EsqlQuery"); }); + super(NAME); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 63686820574b5..1763e36707958 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -39,6 +39,8 @@ public class EsqlQueryResponse extends ActionResponse implements ChunkedToXConte @SuppressWarnings("this-escape") private final AbstractRefCounted counted = AbstractRefCounted.of(this::closeInternal); + public static final String DROP_NULL_COLUMNS_OPTION = "drop_null_columns"; + private final List columns; private final List pages; private final Profile profile; @@ -160,20 +162,45 @@ private Iterator asyncPropertiesOrEmpty() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - final Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar); + boolean dropNullColumns = params.paramAsBoolean(DROP_NULL_COLUMNS_OPTION, false); + boolean[] nullColumns = dropNullColumns ? 
nullColumns() : null; + Iterator columnHeadings = dropNullColumns + ? Iterators.concat( + ResponseXContentUtils.allColumns(columns, "all_columns"), + ResponseXContentUtils.nonNullColumns(columns, nullColumns, "columns") + ) + : ResponseXContentUtils.allColumns(columns, "columns"); + Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns); Iterator profileRender = profile == null ? List.of().iterator() : ChunkedToXContentHelper.field("profile", profile, params); return Iterators.concat( ChunkedToXContentHelper.startObject(), asyncPropertiesOrEmpty(), - ResponseXContentUtils.columnHeadings(columns), + columnHeadings, ChunkedToXContentHelper.array("values", valuesIt), profileRender, ChunkedToXContentHelper.endObject() ); } + private boolean[] nullColumns() { + boolean[] nullColumns = new boolean[columns.size()]; + for (int c = 0; c < nullColumns.length; c++) { + nullColumns[c] = allColumnsAreNull(c); + } + return nullColumns; + } + + private boolean allColumnsAreNull(int c) { + for (Page page : pages) { + if (page.getBlock(c).areAllValuesNull() == false) { + return false; + } + } + return true; + } + @Override public boolean isFragment() { return false; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java index 7b525642009a7..0022866cf1742 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java @@ -132,16 +132,14 @@ private RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws IOExce if (mediaType instanceof TextFormat format) { restResponse = RestResponse.chunked( RestStatus.OK, - ChunkedRestResponseBody.fromTextChunks( - format.contentType(restRequest), - format.format(restRequest, esqlResponse), - 
releasable - ) + ChunkedRestResponseBody.fromTextChunks(format.contentType(restRequest), format.format(restRequest, esqlResponse)), + releasable ); } else { restResponse = RestResponse.chunked( RestStatus.OK, - ChunkedRestResponseBody.fromXContent(esqlResponse, channel.request(), channel, releasable) + ChunkedRestResponseBody.fromXContent(esqlResponse, channel.request(), channel), + releasable ); } long tookNanos = stopWatch.stop().getNanos(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 625b488b1e857..d5dc12357f3fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -101,8 +101,8 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); - case "geo_point" -> GEO.wkbAsString(((BytesRefBlock) block).getBytesRef(offset, scratch)); - case "cartesian_point" -> CARTESIAN.wkbAsString(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "geo_point", "geo_shape" -> GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "cartesian_point", "cartesian_shape" -> CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; case "_source" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); @@ -161,14 +161,14 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li throw new UncheckedIOException(e); } } - case "geo_point" -> { + case "geo_point", "geo_shape" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge 
GEO and CARTESIAN here - BytesRef wkb = GEO.stringAsWKB(value.toString()); + BytesRef wkb = GEO.wktToWkb(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } - case "cartesian_point" -> { + case "cartesian_point", "cartesian_shape" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here - BytesRef wkb = CARTESIAN.stringAsWKB(value.toString()); + BytesRef wkb = CARTESIAN.wktToWkb(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java index e28e6beebabed..ca40faff81c55 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java @@ -23,10 +23,12 @@ */ final class ResponseXContentUtils { - /** Returns the column headings for the given columns. */ - static Iterator columnHeadings(List columns) { + /** + * Returns the column headings for the given columns. + */ + static Iterator allColumns(List columns, String name) { return ChunkedToXContentHelper.singleChunk((builder, params) -> { - builder.startArray("columns"); + builder.startArray(name); for (ColumnInfo col : columns) { col.toXContent(builder, params); } @@ -34,43 +36,62 @@ static Iterator columnHeadings(List columns) { }); } + /** + * Returns the column headings for the given columns, moving the heading + * for always-null columns to a {@code null_columns} section. 
+ */ + static Iterator nonNullColumns(List columns, boolean[] nullColumns, String name) { + return ChunkedToXContentHelper.singleChunk((builder, params) -> { + builder.startArray(name); + for (int c = 0; c < columns.size(); c++) { + if (nullColumns[c] == false) { + columns.get(c).toXContent(builder, params); + } + } + return builder.endArray(); + }); + } + /** Returns the column values for the given pages (described by the column infos). */ - static Iterator columnValues(List columns, List pages, boolean columnar) { + static Iterator columnValues( + List columns, + List pages, + boolean columnar, + boolean[] nullColumns + ) { if (pages.isEmpty()) { return Collections.emptyIterator(); } else if (columnar) { - return columnarValues(columns, pages); + return columnarValues(columns, pages, nullColumns); } else { - return rowValues(columns, pages); + return rowValues(columns, pages, nullColumns); } } /** Returns a columnar based representation of the values in the given pages (described by the column infos). 
*/ - static Iterator columnarValues(List columns, List pages) { + static Iterator columnarValues(List columns, List pages, boolean[] nullColumns) { final BytesRef scratch = new BytesRef(); - return Iterators.flatMap( - Iterators.forRange( - 0, - columns.size(), - column -> Iterators.concat( - Iterators.single(((builder, params) -> builder.startArray())), - Iterators.flatMap(pages.iterator(), page -> { - ColumnInfo.PositionToXContent toXContent = columns.get(column).positionToXContent(page.getBlock(column), scratch); - return Iterators.forRange( - 0, - page.getPositionCount(), - position -> (builder, params) -> toXContent.positionToXContent(builder, params, position) - ); - }), - ChunkedToXContentHelper.endArray() - ) - ), - Function.identity() - ); + return Iterators.flatMap(Iterators.forRange(0, columns.size(), column -> { + if (nullColumns != null && nullColumns[column]) { + return Collections.emptyIterator(); + } + return Iterators.concat( + Iterators.single(((builder, params) -> builder.startArray())), + Iterators.flatMap(pages.iterator(), page -> { + ColumnInfo.PositionToXContent toXContent = columns.get(column).positionToXContent(page.getBlock(column), scratch); + return Iterators.forRange( + 0, + page.getPositionCount(), + position -> (builder, params) -> toXContent.positionToXContent(builder, params, position) + ); + }), + ChunkedToXContentHelper.endArray() + ); + }), Function.identity()); } /** Returns a row based representation of the values in the given pages (described by the column infos). */ - static Iterator rowValues(List columns, List pages) { + static Iterator rowValues(List columns, List pages, boolean[] nullColumns) { final BytesRef scratch = new BytesRef(); return Iterators.flatMap(pages.iterator(), page -> { final int columnCount = columns.size(); @@ -82,7 +103,9 @@ static Iterator rowValues(List columns, List

    (builder, params) -> { builder.startArray(); for (int c = 0; c < columnCount; c++) { - toXContents[c].positionToXContent(builder, params, position); + if (nullColumns == null || nullColumns[c] == false) { + toXContents[c].positionToXContent(builder, params, position); + } } return builder.endArray(); }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 3dea461ccf8b7..0b2bad2eb22d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -18,11 +18,11 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.esql.action.EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION; import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; @ServerlessScope(Scope.PUBLIC) @@ -60,6 +60,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override protected Set responseParams() { - return Collections.singleton(URL_PARAM_DELIMITER); + return Set.of(URL_PARAM_DELIMITER, DROP_NULL_COLUMNS_OPTION); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java index 35a679e23d1f7..b5a1821350e5e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java @@ -16,8 +16,11 @@ import 
org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import java.util.List; +import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.xpack.esql.action.EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION; +import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; @ServerlessScope(Scope.PUBLIC) public class RestEsqlGetAsyncResultAction extends BaseRestHandler { @@ -42,4 +45,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } return channel -> client.execute(EsqlAsyncGetResultAction.INSTANCE, get, new RestRefCountedChunkedToXContentListener<>(channel)); } + + @Override + protected Set responseParams() { + return Set.of(URL_PARAM_DELIMITER, DROP_NULL_COLUMNS_OPTION); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index 6b8e7fc397865..070c0e112e051 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; @@ -65,6 +64,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override protected Set responseParams() { - return Collections.singleton(URL_PARAM_DELIMITER); + return Set.of(URL_PARAM_DELIMITER, EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 674a32db1f0fb..5fb56f674f5d7 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -43,7 +42,6 @@ import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; -import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.plan.TableIdentifier; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; @@ -209,52 +207,35 @@ protected LogicalPlan rule(Enrich plan, AnalyzerContext context) { // the policy does not exist return plan; } - String policyName = (String) plan.policyName().fold(); - EnrichPolicyResolution policyRes = context.enrichResolution() - .resolvedPolicies() - .stream() - .filter(x -> x.policyName().equals(policyName)) - .findFirst() - .orElse(new EnrichPolicyResolution(policyName, null, null)); - - IndexResolution idx = policyRes.index(); - EnrichPolicy policy = policyRes.policy(); - - var policyNameExp = policy == null || idx == null - ? new UnresolvedAttribute( - plan.policyName().source(), - policyName, - null, - unresolvedPolicyError(policyName, context.enrichResolution()) - ) - : plan.policyName(); - - var matchField = policy != null && (plan.matchField() == null || plan.matchField() instanceof EmptyAttribute) - ? 
new UnresolvedAttribute(plan.source(), policy.getMatchField()) - : plan.matchField(); - - List enrichFields = policy == null || idx == null - ? (plan.enrichFields() == null ? List.of() : plan.enrichFields()) - : calculateEnrichFields( + final String policyName = (String) plan.policyName().fold(); + final var resolved = context.enrichResolution().getResolvedPolicy(policyName, plan.mode()); + if (resolved != null) { + var policy = new EnrichPolicy(resolved.matchType(), null, List.of(), resolved.matchField(), resolved.enrichFields()); + var matchField = plan.matchField() == null || plan.matchField() instanceof EmptyAttribute + ? new UnresolvedAttribute(plan.source(), policy.getMatchField()) + : plan.matchField(); + List enrichFields = calculateEnrichFields( plan.source(), policyName, - mappingAsAttributes(plan.source(), idx.get().mapping()), + mappingAsAttributes(plan.source(), resolved.mapping()), plan.enrichFields(), policy ); - - return new Enrich(plan.source(), plan.child(), policyNameExp, matchField, policyRes, enrichFields); - } - - private String unresolvedPolicyError(String policyName, EnrichResolution enrichResolution) { - List potentialMatches = StringUtils.findSimilar(policyName, enrichResolution.existingPolicies()); - String msg = "unresolved enrich policy [" + policyName + "]"; - if (CollectionUtils.isEmpty(potentialMatches) == false) { - msg += ", did you mean " - + (potentialMatches.size() == 1 ? 
"[" + potentialMatches.get(0) + "]" : "any of " + potentialMatches) - + "?"; + return new Enrich( + plan.source(), + plan.child(), + plan.mode(), + plan.policyName(), + matchField, + policy, + resolved.concreteIndices(), + enrichFields + ); + } else { + String error = context.enrichResolution().getError(policyName, plan.mode()); + var policyNameExp = new UnresolvedAttribute(plan.policyName().source(), policyName, null, error); + return new Enrich(plan.source(), plan.child(), plan.mode(), policyNameExp, plan.matchField(), null, Map.of(), List.of()); } - return msg; } public static List calculateEnrichFields( @@ -463,26 +444,24 @@ private LogicalPlan resolveKeep(Project p, List childOutput) { // otherwise resolve them else { Map priorities = new LinkedHashMap<>(); - for (Attribute attribute : childOutput) { - for (var proj : projections) { - List resolved; - int priority; - if (proj instanceof UnresolvedStar) { - resolved = childOutput; - priority = 2; - } else if (proj instanceof UnresolvedAttribute ua) { - resolved = resolveAgainstList(ua, childOutput); - priority = Regex.isSimpleMatchPattern(ua.name()) ? 1 : 0; - } else { - resolved = List.of(attribute); - priority = 0; - } - for (Attribute attr : resolved) { - Integer previousPrio = priorities.get(attr); - if (previousPrio == null || previousPrio >= priority) { - priorities.remove(attr); - priorities.put(attr, priority); - } + for (var proj : projections) { + final List resolved; + final int priority; + if (proj instanceof UnresolvedStar) { + resolved = childOutput; + priority = 2; + } else if (proj instanceof UnresolvedAttribute ua) { + resolved = resolveAgainstList(ua, childOutput); + priority = Regex.isSimpleMatchPattern(ua.name()) ? 
1 : 0; + } else { + assert false : "unexpected projection: " + proj; + throw new IllegalStateException("unexpected projection: " + proj); + } + for (Attribute attr : resolved) { + Integer previousPrio = priorities.get(attr); + if (previousPrio == null || previousPrio >= priority) { + priorities.remove(attr); + priorities.put(attr, priority); } } } @@ -584,7 +563,16 @@ private LogicalPlan resolveEnrich(Enrich enrich, List childrenOutput) "Unsupported type [" + resolved.dataType() + "] for enrich matching field [" + ua.name() + "]; only KEYWORD allowed" ); } - return new Enrich(enrich.source(), enrich.child(), enrich.policyName(), resolved, enrich.policy(), enrich.enrichFields()); + return new Enrich( + enrich.source(), + enrich.child(), + enrich.mode(), + enrich.policyName(), + resolved, + enrich.policy(), + enrich.concreteIndices(), + enrich.enrichFields() + ); } return enrich; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java index 332e5e60565b6..7fb279f18b1dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java @@ -7,8 +7,51 @@ package org.elasticsearch.xpack.esql.analysis; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import java.util.Set; +import java.util.Collection; +import java.util.Map; -public record EnrichResolution(Set resolvedPolicies, Set existingPolicies) {} +/** + * Holds the resolution results of the enrich polices. + * The results and errors are collected via {@link #addResolvedPolicy} and {@link #addError}. 
+ * And the results can be retrieved via {@link #getResolvedPolicy} and {@link #getError} + */ +public final class EnrichResolution { + + private final Map resolvedPolicies = ConcurrentCollections.newConcurrentMap(); + private final Map errors = ConcurrentCollections.newConcurrentMap(); + + public ResolvedEnrichPolicy getResolvedPolicy(String policyName, Enrich.Mode mode) { + return resolvedPolicies.get(new Key(policyName, mode)); + } + + public Collection resolvedEnrichPolicies() { + return resolvedPolicies.values(); + + } + + public String getError(String policyName, Enrich.Mode mode) { + final String error = errors.get(new Key(policyName, mode)); + if (error != null) { + return error; + } else { + assert false : "unresolved enrich policy [" + policyName + "] mode [" + mode + "]"; + return "unresolved enrich policy [" + policyName + "] mode [" + mode + "]"; + } + } + + public void addResolvedPolicy(String policyName, Enrich.Mode mode, ResolvedEnrichPolicy policy) { + resolvedPolicies.putIfAbsent(new Key(policyName, mode), policy); + } + + public void addError(String policyName, Enrich.Mode mode, String reason) { + errors.putIfAbsent(new Key(policyName, mode), reason); + } + + private record Key(String policyName, Enrich.Mode mode) { + + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java index f34a4f0f37a70..654ece932e4ed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java @@ -23,11 +23,11 @@ public static class PreAnalysis { public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList()); public final List indices; - public final List policyNames; + public final List enriches; - public PreAnalysis(List indices, List policyNames) { + public PreAnalysis(List 
indices, List enriches) { this.indices = indices; - this.policyNames = policyNames; + this.enriches = enriches; } } @@ -41,14 +41,14 @@ public PreAnalysis preAnalyze(LogicalPlan plan) { protected PreAnalysis doPreAnalyze(LogicalPlan plan) { List indices = new ArrayList<>(); - List policyNames = new ArrayList<>(); + List unresolvedEnriches = new ArrayList<>(); plan.forEachUp(EsqlUnresolvedRelation.class, p -> indices.add(new TableInfo(p.table(), p.frozen()))); - plan.forEachUp(Enrich.class, p -> policyNames.add((String) p.policyName().fold())); + plan.forEachUp(Enrich.class, unresolvedEnriches::add); // mark plan as preAnalyzed (if it were marked, there would be no analysis) plan.forEachUp(LogicalPlan::setPreAnalyzed); - return new PreAnalysis(indices, policyNames); + return new PreAnalysis(indices, unresolvedEnriches); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index d38dd57ff6aa8..66953e93ae9af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -21,11 +21,8 @@ import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.FieldAttribute; -import org.elasticsearch.xpack.ql.expression.Literal; -import org.elasticsearch.xpack.ql.expression.MetadataAttribute; +import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.NamedExpression; -import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import 
org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; @@ -85,11 +82,9 @@ else if (p.resolved()) { // handle aggregate first to disambiguate between missing fields or incorrect function declaration if (p instanceof Aggregate aggregate) { for (NamedExpression agg : aggregate.aggregates()) { - if (agg instanceof Alias as) { - var child = as.child(); - if (child instanceof UnresolvedAttribute u) { - failures.add(fail(child, "invalid stats declaration; [{}] is not an aggregate function", child.sourceText())); - } + var child = Alias.unwrap(agg); + if (child instanceof UnresolvedAttribute) { + failures.add(fail(child, "invalid stats declaration; [{}] is not an aggregate function", child.sourceText())); } } } @@ -150,36 +145,36 @@ else if (p.resolved()) { private static void checkAggregate(LogicalPlan p, Set failures) { if (p instanceof Aggregate agg) { + // check aggregates agg.aggregates().forEach(e -> { - var exp = e instanceof Alias ? ((Alias) e).child() : e; - if (exp instanceof AggregateFunction aggFunc) { - Expression field = aggFunc.field(); - - // TODO: allow an expression? - if ((field instanceof FieldAttribute - || field instanceof MetadataAttribute - || field instanceof ReferenceAttribute - || field instanceof Literal) == false) { + var exp = Alias.unwrap(e); + if (exp instanceof AggregateFunction af) { + af.field().forEachDown(AggregateFunction.class, f -> { + failures.add(fail(f, "nested aggregations [{}] not allowed inside other aggregations [{}]", f, af)); + }); + } else { + if (Expressions.match(agg.groupings(), g -> Alias.unwrap(g).semanticEquals(exp)) == false) { failures.add( fail( - e, - "aggregate function's field must be an attribute or literal; found [" - + field.sourceText() + exp, + "expected an aggregate function or group but got [" + + exp.sourceText() + "] of type [" - + field.nodeName() + + exp.nodeName() + "]" ) ); } - } else if (agg.groupings().contains(exp) == false) { // TODO: allow an expression? 
- failures.add( - fail( - exp, - "expected an aggregate function or group but got [" + exp.sourceText() + "] of type [" + exp.nodeName() + "]" - ) - ); } }); + + // check grouping + // The grouping can not be an aggregate function + agg.groupings().forEach(e -> e.forEachUp(g -> { + if (g instanceof AggregateFunction af) { + failures.add(fail(g, "cannot use an aggregate [{}] for grouping", af)); + } + })); } } @@ -214,12 +209,17 @@ private static void checkRow(LogicalPlan p, Set failures) { private static void checkEvalFields(LogicalPlan p, Set failures) { if (p instanceof Eval eval) { eval.fields().forEach(field -> { + // check supported types DataType dataType = field.dataType(); if (EsqlDataTypes.isRepresentable(dataType) == false) { failures.add( fail(field, "EVAL does not support type [{}] in expression [{}]", dataType.typeName(), field.child().sourceText()) ); } + // check no aggregate functions are used + field.forEachDown(AggregateFunction.class, af -> { + failures.add(fail(af, "aggregate function [{}] not allowed outside STATS command", af.sourceText())); + }); }); } } @@ -279,7 +279,9 @@ public static Failure validateBinaryComparison(BinaryComparison bc) { allowed.add(DataTypes.DATETIME); allowed.add(DataTypes.VERSION); allowed.add(EsqlDataTypes.GEO_POINT); + allowed.add(EsqlDataTypes.GEO_SHAPE); allowed.add(EsqlDataTypes.CARTESIAN_POINT); + allowed.add(EsqlDataTypes.CARTESIAN_SHAPE); if (bc instanceof Equals || bc instanceof NotEquals) { allowed.add(DataTypes.BOOLEAN); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index a533c373ad2ca..1115329fd2c73 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -31,7 +31,6 @@ import 
org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.BlockReaderFactories; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -74,6 +73,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -274,10 +274,15 @@ private void doLookup( NamedExpression extractField = extractFields.get(i); final ElementType elementType = PlannerUtils.toElementType(extractField.dataType()); mergingTypes[i] = elementType; - BlockLoader loader = BlockReaderFactories.loader( + EsPhysicalOperationProviders.ShardContext ctx = new EsPhysicalOperationProviders.DefaultShardContext( + 0, searchContext.getSearchExecutionContext(), + searchContext.request().getAliasFilter() + ); + BlockLoader loader = ctx.blockLoader( extractField instanceof Alias a ? 
((NamedExpression) a.child()).name() : extractField.name(), - EsqlDataTypes.isUnsupported(extractField.dataType()) + EsqlDataTypes.isUnsupported(extractField.dataType()), + MappedFieldType.FieldExtractPreference.NONE ); fields.add( new ValuesSourceReaderOperator.FieldInfo( @@ -296,9 +301,12 @@ private void doLookup( new ValuesSourceReaderOperator( driverContext.blockFactory(), fields, - List.of(new ValuesSourceReaderOperator.ShardContext(searchContext.searcher().getIndexReader(), () -> { - throw new UnsupportedOperationException("can't load _source as part of enrich"); - })), + List.of( + new ValuesSourceReaderOperator.ShardContext( + searchContext.searcher().getIndexReader(), + searchContext::newSourceLoader + ) + ), 0 ) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolution.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolution.java deleted file mode 100644 index 5014fe1fcd1df..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolution.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.enrich; - -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.ql.index.IndexResolution; - -public record EnrichPolicyResolution(String policyName, EnrichPolicy policy, IndexResolution index) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java index 1e21886a7ac4b..7ddca0f914709 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java @@ -9,29 +9,62 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterAware; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; +import 
org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.enrich.EnrichMetadata; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.esql.analysis.EnrichResolution; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.session.EsqlSession; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolver; +import org.elasticsearch.xpack.ql.type.EsField; +import org.elasticsearch.xpack.ql.util.StringUtils; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; +/** + * Resolves enrich policies across clusters in several steps: + * 1. Calculates the policies that need to be resolved for each cluster, see {@link #lookupPolicies}. + * 2. Sends out {@link LookupRequest} to each cluster to resolve policies. Internally, a remote cluster handles the lookup in two steps: + * - 2.1 Ensures the caller has permission to access the enrich policies. + * - 2.2 For each found enrich policy, uses {@link IndexResolver} to resolve the mappings of the concrete enrich index. + * 3. For each unresolved policy, combines the lookup results to compute the actual enrich policy and mappings depending on the enrich mode. + * This approach requires at most one cross-cluster call for each cluster. 
+ */ public class EnrichPolicyResolver { private static final String RESOLVE_ACTION_NAME = "cluster:monitor/xpack/enrich/esql/resolve_policy"; @@ -48,94 +81,328 @@ public EnrichPolicyResolver(ClusterService clusterService, TransportService tran transportService.registerRequestHandler( RESOLVE_ACTION_NAME, threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME), - ResolveRequest::new, + LookupRequest::new, new RequestHandler() ); } - public void resolvePolicy(String policyName, ActionListener listener) { - transportService.sendRequest( - clusterService.localNode(), - RESOLVE_ACTION_NAME, - new ResolveRequest(policyName), - new ActionListenerResponseHandler<>( - listener.map(r -> r.resolution), - ResolveResponse::new, - threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME) - ) - ); + public record UnresolvedPolicy(String name, Enrich.Mode mode) { + + } + + /** + * Resolves a set of enrich policies + * + * @param targetClusters the target clusters + * @param unresolvedPolicies the unresolved policies + * @param listener notified with the enrich resolution + */ + public void resolvePolicies( + Collection targetClusters, + Collection unresolvedPolicies, + ActionListener listener + ) { + if (unresolvedPolicies.isEmpty() || targetClusters.isEmpty()) { + listener.onResponse(new EnrichResolution()); + return; + } + final Set remoteClusters = new HashSet<>(targetClusters); + final boolean includeLocal = remoteClusters.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + lookupPolicies(remoteClusters, includeLocal, unresolvedPolicies, listener.map(lookupResponses -> { + final EnrichResolution enrichResolution = new EnrichResolution(); + for (UnresolvedPolicy unresolved : unresolvedPolicies) { + Tuple resolved = mergeLookupResults( + unresolved, + calculateTargetClusters(unresolved.mode, includeLocal, remoteClusters), + lookupResponses + ); + if (resolved.v1() != null) { + enrichResolution.addResolvedPolicy(unresolved.name, unresolved.mode, resolved.v1()); + } else { + assert 
resolved.v2() != null; + enrichResolution.addError(unresolved.name, unresolved.mode, resolved.v2()); + } + } + return enrichResolution; + })); + } + + private Collection calculateTargetClusters(Enrich.Mode mode, boolean includeLocal, Set remoteClusters) { + return switch (mode) { + case ANY -> CollectionUtils.appendToCopy(remoteClusters, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + case COORDINATOR -> List.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + case REMOTE -> includeLocal + ? CollectionUtils.appendToCopy(remoteClusters, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) + : remoteClusters; + }; + } + + /** + * Resolve an enrich policy by merging the lookup responses from the target clusters. + * @return a resolved enrich policy or an error + */ + private Tuple mergeLookupResults( + UnresolvedPolicy unresolved, + Collection targetClusters, + Map lookupResults + ) { + assert targetClusters.isEmpty() == false; + String policyName = unresolved.name; + final Map policies = new HashMap<>(); + final List failures = new ArrayList<>(); + for (String cluster : targetClusters) { + LookupResponse lookupResult = lookupResults.get(cluster); + if (lookupResult != null) { + ResolvedEnrichPolicy policy = lookupResult.policies.get(policyName); + if (policy != null) { + policies.put(cluster, policy); + } else { + final String failure = lookupResult.failures.get(policyName); + if (failure != null) { + failures.add(failure); + } + } + } + } + if (targetClusters.size() != policies.size()) { + final String reason; + if (failures.isEmpty()) { + List missingClusters = targetClusters.stream().filter(c -> policies.containsKey(c) == false).sorted().toList(); + reason = missingPolicyError(policyName, targetClusters, missingClusters); + } else { + reason = "failed to resolve enrich policy [" + policyName + "]; reason " + failures; + } + return Tuple.tuple(null, reason); + } + Map mappings = new HashMap<>(); + Map concreteIndices = new HashMap<>(); + ResolvedEnrichPolicy last = null; + for 
(Map.Entry e : policies.entrySet()) { + ResolvedEnrichPolicy curr = e.getValue(); + if (last != null && last.matchField().equals(curr.matchField()) == false) { + String error = "enrich policy [" + policyName + "] has different match fields "; + error += "[" + last.matchField() + ", " + curr.matchField() + "] across clusters"; + return Tuple.tuple(null, error); + } + if (last != null && last.matchType().equals(curr.matchType()) == false) { + String error = "enrich policy [" + policyName + "] has different match types "; + error += "[" + last.matchType() + ", " + curr.matchType() + "] across clusters"; + return Tuple.tuple(null, error); + } + // merge mappings + for (Map.Entry m : curr.mapping().entrySet()) { + EsField field = m.getValue(); + field = new EsField( + field.getName(), + EsqlDataTypes.fromTypeName(field.getDataType().typeName()), + field.getProperties(), + field.isAggregatable(), + field.isAlias() + ); + EsField old = mappings.putIfAbsent(m.getKey(), field); + if (old != null && old.getDataType().equals(field.getDataType()) == false) { + String error = "field [" + m.getKey() + "] of enrich policy [" + policyName + "] has different data types "; + error += "[" + old.getDataType() + ", " + field.getDataType() + "] across clusters"; + return Tuple.tuple(null, error); + } + } + if (last != null) { + Map counts = Maps.newMapWithExpectedSize(last.enrichFields().size()); + last.enrichFields().forEach(f -> counts.put(f, 1)); + curr.enrichFields().forEach(f -> counts.compute(f, (k, v) -> v == null ? 
1 : v + 1)); + // should be sorted-then-limit, but this sorted is for testing only + var diff = counts.entrySet().stream().filter(f -> f.getValue() < 2).map(Map.Entry::getKey).limit(20).sorted().toList(); + if (diff.isEmpty() == false) { + String detailed = "these fields are missing in some policies: " + diff; + return Tuple.tuple(null, "enrich policy [" + policyName + "] has different enrich fields across clusters; " + detailed); + } + } + // merge concrete indices + concreteIndices.putAll(curr.concreteIndices()); + last = curr; + } + assert last != null; + var resolved = new ResolvedEnrichPolicy(last.matchField(), last.matchType(), last.enrichFields(), concreteIndices, mappings); + return Tuple.tuple(resolved, null); + } + + private String missingPolicyError(String policyName, Collection targetClusters, List missingClusters) { + // local cluster only + if (targetClusters.size() == 1 && Iterables.get(missingClusters, 0).isEmpty()) { + String reason = "enrich policy [" + policyName + "] doesn't exist"; + // accessing the policy names directly after we have checked the permission. + List potentialMatches = StringUtils.findSimilar(policyName, availablePolicies().keySet()); + if (potentialMatches.isEmpty() == false) { + var suggestion = potentialMatches.size() == 1 ? "[" + potentialMatches.get(0) + "]" : "any of " + potentialMatches; + reason += ", did you mean " + suggestion + "?"; + } + return reason; + } + String detailed = missingClusters.stream().sorted().map(c -> c.isEmpty() ? 
"_local" : c).collect(Collectors.joining(", ")); + return "enrich policy [" + policyName + "] doesn't exist on clusters [" + detailed + "]"; } - private static UnsupportedOperationException unsupported() { - return new UnsupportedOperationException("local node transport action"); + private void lookupPolicies( + Collection remoteClusters, + boolean includeLocal, + Collection unresolvedPolicies, + ActionListener> listener + ) { + final Map lookupResponses = ConcurrentCollections.newConcurrentMap(); + try (RefCountingListener refs = new RefCountingListener(listener.map(unused -> lookupResponses))) { + Set remotePolicies = unresolvedPolicies.stream() + .filter(u -> u.mode != Enrich.Mode.COORDINATOR) + .map(u -> u.name) + .collect(Collectors.toSet()); + // remote clusters + if (remotePolicies.isEmpty() == false) { + for (String cluster : remoteClusters) { + final Transport.Connection connection; + try { + connection = getRemoteConnection(cluster); + } catch (Exception e) { + refs.acquire().onFailure(e); + return; + } + transportService.sendRequest( + connection, + RESOLVE_ACTION_NAME, + new LookupRequest(cluster, remotePolicies), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>( + refs.acquire(resp -> lookupResponses.put(cluster, resp)), + LookupResponse::new, + threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME) + ) + ); + } + } + // local cluster + Set localPolicies = unresolvedPolicies.stream() + .filter(u -> includeLocal || u.mode != Enrich.Mode.REMOTE) + .map(u -> u.name) + .collect(Collectors.toSet()); + if (localPolicies.isEmpty() == false) { + transportService.sendRequest( + transportService.getLocalNode(), + RESOLVE_ACTION_NAME, + new LookupRequest(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, localPolicies), + new ActionListenerResponseHandler<>( + refs.acquire(resp -> lookupResponses.put(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, resp)), + LookupResponse::new, + threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME) + ) + ); + } + } } - 
private static class ResolveRequest extends TransportRequest { - private final String policyName; + private static class LookupRequest extends TransportRequest { + private final String clusterAlias; + private final Collection policyNames; - ResolveRequest(String policyName) { - this.policyName = policyName; + LookupRequest(String clusterAlias, Collection policyNames) { + this.clusterAlias = clusterAlias; + this.policyNames = policyNames; } - ResolveRequest(StreamInput in) { - throw unsupported(); + LookupRequest(StreamInput in) throws IOException { + this.clusterAlias = in.readString(); + this.policyNames = in.readStringCollectionAsList(); } @Override - public void writeTo(StreamOutput out) { - throw unsupported(); + public void writeTo(StreamOutput out) throws IOException { + out.writeString(clusterAlias); + out.writeStringCollection(policyNames); } } - private static class ResolveResponse extends TransportResponse { - private final EnrichPolicyResolution resolution; + private static class LookupResponse extends TransportResponse { + final Map policies; + final Map failures; - ResolveResponse(EnrichPolicyResolution resolution) { - this.resolution = resolution; + LookupResponse(Map policies, Map failures) { + this.policies = policies; + this.failures = failures; } - ResolveResponse(StreamInput in) { - throw unsupported(); + LookupResponse(StreamInput in) throws IOException { + this.policies = in.readMap(StreamInput::readString, ResolvedEnrichPolicy::new); + this.failures = in.readMap(StreamInput::readString, StreamInput::readString); } @Override - public void writeTo(StreamOutput out) { - throw unsupported(); + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(policies, (o, v) -> v.writeTo(o)); + out.writeMap(failures, StreamOutput::writeString); } } - private class RequestHandler implements TransportRequestHandler { + private class RequestHandler implements TransportRequestHandler { @Override - public void messageReceived(ResolveRequest 
request, TransportChannel channel, Task task) throws Exception { - String policyName = request.policyName; - EnrichPolicy policy = policies().get(policyName); + public void messageReceived(LookupRequest request, TransportChannel channel, Task task) { + final Map availablePolicies = availablePolicies(); + final Map failures = ConcurrentCollections.newConcurrentMap(); + final Map resolvedPolices = ConcurrentCollections.newConcurrentMap(); ThreadContext threadContext = threadPool.getThreadContext(); - ActionListener listener = new ChannelActionListener<>(channel); - listener = ContextPreservingActionListener.wrapPreservingContext(listener, threadContext); - try (ThreadContext.StoredContext ignored = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) { - indexResolver.resolveAsMergedMapping( - EnrichPolicy.getBaseName(policyName), - IndexResolver.ALL_FIELDS, - false, - Map.of(), - listener.map(indexResult -> new ResolveResponse(new EnrichPolicyResolution(policyName, policy, indexResult))), - EsqlSession::specificValidity - ); + ActionListener listener = ContextPreservingActionListener.wrapPreservingContext( + new ChannelActionListener<>(channel), + threadContext + ); + try ( + RefCountingListener refs = new RefCountingListener(listener.map(unused -> new LookupResponse(resolvedPolices, failures))) + ) { + for (String policyName : request.policyNames) { + EnrichPolicy p = availablePolicies.get(policyName); + if (p == null) { + continue; + } + try (ThreadContext.StoredContext ignored = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) { + String indexName = EnrichPolicy.getBaseName(policyName); + indexResolver.resolveAsMergedMapping( + indexName, + IndexResolver.ALL_FIELDS, + false, + Map.of(), + refs.acquire(indexResult -> { + if (indexResult.isValid() && indexResult.get().concreteIndices().size() == 1) { + EsIndex esIndex = indexResult.get(); + var concreteIndices = Map.of(request.clusterAlias, Iterables.get(esIndex.concreteIndices(), 0)); + var 
resolved = new ResolvedEnrichPolicy( + p.getMatchField(), + p.getType(), + p.getEnrichFields(), + concreteIndices, + esIndex.mapping() + ); + resolvedPolices.put(policyName, resolved); + } else { + failures.put(policyName, indexResult.toString()); + } + }), + EsqlSession::specificValidity + ); + } + } } } } - public Set allPolicyNames() { - // TODO: remove this suggestion as it exposes policy names without the right permission - return policies().keySet(); + protected Map availablePolicies() { + final EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE); + return metadata == null ? Map.of() : metadata.getPolicies(); } - private Map policies() { - if (clusterService == null || clusterService.state() == null) { - return Map.of(); - } - EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE); - return metadata == null ? Map.of() : metadata.getPolicies(); + protected Transport.Connection getRemoteConnection(String cluster) { + return transportService.getRemoteClusterService().getConnection(cluster); } + public Map> groupIndicesPerCluster(String[] indices) { + return transportService.getRemoteClusterService() + .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, indices) + .entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> Arrays.asList(e.getValue().indices()))); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java new file mode 100644 index 0000000000000..446e2e90af397 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.type.EsField; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public record ResolvedEnrichPolicy( + String matchField, + String matchType, + List enrichFields, + Map concreteIndices, + Map mapping +) implements Writeable { + public ResolvedEnrichPolicy(StreamInput in) throws IOException { + this( + in.readString(), + in.readString(), + in.readStringCollectionAsList(), + in.readMap(StreamInput::readString), + in.readMap(StreamInput::readString, ResolvedEnrichPolicy::readEsField) + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(matchField); + out.writeString(matchType); + out.writeStringCollection(enrichFields); + out.writeMap(concreteIndices, StreamOutput::writeString); + out.writeMap(mapping, ResolvedEnrichPolicy::writeEsField); + } + + // TODO: we should have made EsField and DataType Writable, but write it as NamedWritable in PlanStreamInput + private static void writeEsField(StreamOutput out, EsField field) throws IOException { + out.writeString(field.getName()); + out.writeString(field.getDataType().typeName()); + out.writeMap(field.getProperties(), ResolvedEnrichPolicy::writeEsField); + out.writeBoolean(field.isAggregatable()); + out.writeBoolean(field.isAlias()); + } + + private static EsField readEsField(StreamInput in) throws IOException { + return new EsField( + in.readString(), + EsqlDataTypes.fromTypeName(in.readString()), + in.readMap(ResolvedEnrichPolicy::readEsField), + in.readBoolean(), + in.readBoolean() + ); + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 54c9fec4da96a..1087e9d33b805 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.ComparisonMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InMapper; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEqualsMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RegexMapper; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; @@ -48,6 +49,7 @@ public final class EvalMapper { ComparisonMapper.LESS_THAN_OR_EQUAL, InMapper.IN_MAPPER, RegexMapper.REGEX_MATCH, + new InsensitiveEqualsMapper(), new BooleanLogic(), new Nots(), new Attributes(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java index 7f5a6079cc6d7..85b30032c1070 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java @@ -163,10 +163,7 @@ public final ExpressionEvaluator.Factory map(BinaryComparison bc, Layout layout) if (leftType == DataTypes.DATETIME) { return longs.apply(bc.source(), leftEval, rightEval); } - if (leftType == EsqlDataTypes.GEO_POINT) { 
- return geometries.apply(bc.source(), leftEval, rightEval, leftType); - } - if (leftType == EsqlDataTypes.CARTESIAN_POINT) { + if (EsqlDataTypes.isSpatial(leftType)) { return geometries.apply(bc.source(), leftEval, rightEval, leftType); } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveBinaryComparison.java new file mode 100644 index 0000000000000..3f8030ee18f97 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveBinaryComparison.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +public abstract class InsensitiveBinaryComparison extends BinaryScalarFunction { + + protected InsensitiveBinaryComparison(Source source, Expression left, Expression right) { + super(source, left, right); + } + + @Override + public DataType dataType() { + return DataTypes.BOOLEAN; + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEquals.java new file mode 100644 index 0000000000000..ba0ebc5552cea --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEquals.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.lucene.search.AutomatonQueries; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +public class InsensitiveEquals extends InsensitiveBinaryComparison { + + public InsensitiveEquals(Source source, Expression left, Expression right) { + super(source, left, right); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, InsensitiveEquals::new, left(), right()); + } + + @Override + protected InsensitiveEquals replaceChildren(Expression newLeft, Expression newRight) { + return new InsensitiveEquals(source(), newLeft, newRight); + } + + @Evaluator + static boolean process(BytesRef lhs, BytesRef rhs) { + return processConstant(lhs, new ByteRunAutomaton(automaton(rhs))); + } + + @Evaluator(extraName = "Constant") + static boolean processConstant(BytesRef lhs, @Fixed ByteRunAutomaton rhs) { + return rhs.run(lhs.bytes, lhs.offset, lhs.length); + } + + public String symbol() { + return "=~"; + } + + protected TypeResolution resolveType() { + return TypeResolutions.isString(left(), sourceText(), TypeResolutions.ParamOrdinal.FIRST) + .and(TypeResolutions.isString(right(), sourceText(), TypeResolutions.ParamOrdinal.SECOND)) + .and(TypeResolutions.isFoldable(right(), sourceText(), TypeResolutions.ParamOrdinal.SECOND)); + } + + public static Automaton automaton(BytesRef val) { + return AutomatonQueries.toCaseInsensitiveString(val.utf8ToString()); + } + + @Override + public Boolean 
fold() { + BytesRef leftVal = BytesRefs.toBytesRef(left().fold()); + BytesRef rightVal = BytesRefs.toBytesRef(right().fold()); + if (leftVal == null || rightVal == null) { + return null; + } + return process(leftVal, rightVal); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsMapper.java new file mode 100644 index 0000000000000..8fdacf72e811c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsMapper.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; +import org.elasticsearch.xpack.esql.planner.Layout; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import static org.elasticsearch.xpack.esql.evaluator.EvalMapper.toEvaluator; + +public class InsensitiveEqualsMapper 
extends ExpressionMapper { + + private final TriFunction keywords = + InsensitiveEqualsEvaluator.Factory::new; + + @Override + public final ExpressionEvaluator.Factory map(InsensitiveEquals bc, Layout layout) { + DataType leftType = bc.left().dataType(); + DataType rightType = bc.right().dataType(); + + var leftEval = toEvaluator(bc.left(), layout); + var rightEval = toEvaluator(bc.right(), layout); + if (leftType == DataTypes.KEYWORD || leftType == DataTypes.TEXT) { + if (bc.right().foldable() && EsqlDataTypes.isString(rightType)) { + BytesRef rightVal = BytesRefs.toBytesRef(bc.right().fold()); + Automaton automaton = InsensitiveEquals.automaton(rightVal); + return dvrCtx -> new InsensitiveEqualsConstantEvaluator( + bc.source(), + leftEval.get(dvrCtx), + new ByteRunAutomaton(automaton), + dvrCtx + ); + } + return keywords.apply(bc.source(), leftEval, rightEval); + } + throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); + } + + public static ExpressionEvaluator.Factory castToEvaluator( + InsensitiveEquals op, + Layout layout, + DataType required, + TriFunction factory + ) { + var lhs = Cast.cast(op.source(), op.left().dataType(), required, toEvaluator(op.left(), layout)); + var rhs = Cast.cast(op.source(), op.right().dataType(), required, toEvaluator(op.right(), layout)); + return factory.apply(op.source(), lhs, rhs); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index d47ccf11c9985..e774ba36b16e6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import 
org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -17,6 +18,7 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; public class EsqlTypeResolutions { @@ -42,4 +44,8 @@ public static Expression.TypeResolution isExact(Expression e, String operationNa } return Expression.TypeResolution.TYPE_RESOLVED; } + + public static Expression.TypeResolution isSpatialPoint(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + return isType(e, EsqlDataTypes::isSpatialPoint, operationName, paramOrd, "geo_point or cartesian_point"); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index b7e7df6cfc499..fa00ec5430657 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -15,16 +15,19 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import 
org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -82,6 +85,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; import org.elasticsearch.xpack.ql.expression.function.FunctionDefinition; @@ -154,7 +159,9 @@ private FunctionDefinition[][] functions() { def(Replace.class, Replace::new, "replace"), def(Right.class, Right::new, "right"), def(StartsWith.class, StartsWith::new, "starts_with"), - def(EndsWith.class, EndsWith::new, "ends_with") }, + def(EndsWith.class, EndsWith::new, "ends_with"), + def(ToLower.class, ToLower::new, "to_lower"), + 
def(ToUpper.class, ToUpper::new, "to_upper") }, // date new FunctionDefinition[] { def(DateDiff.class, DateDiff::new, "date_diff"), @@ -163,6 +170,8 @@ private FunctionDefinition[][] functions() { def(DateParse.class, DateParse::new, "date_parse"), def(DateTrunc.class, DateTrunc::new, "date_trunc"), def(Now.class, Now::new, "now") }, + // spatial + new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid") }, // conditional new FunctionDefinition[] { def(Case.class, Case::new, "case") }, // null @@ -173,10 +182,12 @@ private FunctionDefinition[][] functions() { new FunctionDefinition[] { def(ToBoolean.class, ToBoolean::new, "to_boolean", "to_bool"), def(ToCartesianPoint.class, ToCartesianPoint::new, "to_cartesianpoint"), + def(ToCartesianShape.class, ToCartesianShape::new, "to_cartesianshape"), def(ToDatetime.class, ToDatetime::new, "to_datetime", "to_dt"), def(ToDegrees.class, ToDegrees::new, "to_degrees"), def(ToDouble.class, ToDouble::new, "to_double", "to_dbl"), def(ToGeoPoint.class, ToGeoPoint::new, "to_geopoint"), + def(ToGeoShape.class, ToGeoShape::new, "to_geoshape"), def(ToIP.class, ToIP::new, "to_ip"), def(ToInteger.class, ToInteger::new, "to_integer", "to_int"), def(ToLong.class, ToLong::new, "to_long"), @@ -210,7 +221,14 @@ public static String normalizeName(String name) { public record ArgSignature(String name, String[] type, String description, boolean optional) {} - public record FunctionDescription(String name, List args, String[] returnType, String description, boolean variadic) { + public record FunctionDescription( + String name, + List args, + String[] returnType, + String description, + boolean variadic, + boolean isAggregation + ) { public String fullSignature() { StringBuilder builder = new StringBuilder(); builder.append(ShowFunctions.withPipes(returnType)); @@ -245,29 +263,30 @@ public List argNames() { public static FunctionDescription description(FunctionDefinition def) { var constructors = 
def.clazz().getConstructors(); if (constructors.length == 0) { - return new FunctionDescription(def.name(), List.of(), null, null, false); + return new FunctionDescription(def.name(), List.of(), null, null, false, false); } Constructor constructor = constructors[0]; FunctionInfo functionInfo = constructor.getAnnotation(FunctionInfo.class); - String functionDescription = functionInfo == null ? "" : functionInfo.description(); + String functionDescription = functionInfo == null ? "" : functionInfo.description().replaceAll("\n", " "); String[] returnType = functionInfo == null ? new String[] { "?" } : functionInfo.returnType(); var params = constructor.getParameters(); // no multiple c'tors supported List args = new ArrayList<>(params.length); boolean variadic = false; + boolean isAggregation = functionInfo == null ? false : functionInfo.isAggregation(); for (int i = 1; i < params.length; i++) { // skipping 1st argument, the source if (Configuration.class.isAssignableFrom(params[i].getType()) == false) { Param paramInfo = params[i].getAnnotation(Param.class); String name = paramInfo == null ? params[i].getName() : paramInfo.name(); variadic |= List.class.isAssignableFrom(params[i].getType()); String[] type = paramInfo == null ? new String[] { "?" } : paramInfo.type(); - String desc = paramInfo == null ? "" : paramInfo.description(); + String desc = paramInfo == null ? "" : paramInfo.description().replaceAll("\n", " "); boolean optional = paramInfo == null ? 
false : paramInfo.optional(); args.add(new EsqlFunctionRegistry.ArgSignature(name, type, desc, optional)); } } - return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic); + return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, isAggregation); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index 835bbfa16c25e..cd2e710498e5e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -21,4 +21,6 @@ String[] returnType(); String description() default ""; + + boolean isAggregation() default false; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 14e29a684a2fd..0ba834d1d8954 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; @@ -23,7 +25,8 @@ public class Avg extends AggregateFunction implements SurrogateExpression { - public 
Avg(Source source, Expression field) { + @FunctionInfo(returnType = "double", description = "The average of a numeric field.", isAggregation = true) + public Avg(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index dcb52b6a3f2c1..6413f241dc0ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -10,6 +10,8 @@ import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.CountAggregatorFunction; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -26,7 +28,28 @@ public class Count extends AggregateFunction implements EnclosedAgg, ToAggregator { - public Count(Source source, Expression field) { + @FunctionInfo(returnType = "long", description = "Returns the total number (count) of input values.", isAggregation = true) + public Count( + Source source, + @Param( + optional = true, + name = "field", + type = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Column or literal for which to count the number of values." 
+ ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index c49f9d6c45c1d..62dd3bc6b6254 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -15,6 +15,8 @@ import org.elasticsearch.compute.aggregation.CountDistinctLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -35,7 +37,28 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument private static final int DEFAULT_PRECISION = 3000; private final Expression precision; - public CountDistinct(Source source, Expression field, Expression precision) { + @FunctionInfo(returnType = "long", description = "Returns the approximate number of distinct values.", isAggregation = true) + public CountDistinct( + Source source, + @Param( + name = "field", + type = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Column or literal for which to count the number of distinct values." 
+ ) Expression field, + @Param(optional = true, name = "precision", type = { "integer" }) Expression precision + ) { super(source, field, precision != null ? List.of(precision) : List.of()); this.precision = precision; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index 0964ce2bd5d67..cdcfe20c968a8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.MaxDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,12 @@ public class Max extends NumericAggregate { - public Max(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The maximum value of a numeric field.", + isAggregation = true + ) + public Max(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index cffeb925d5e2b..7f5bce981db51 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.aggregation.QuantileStates; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; @@ -24,7 +26,12 @@ public class Median extends AggregateFunction implements SurrogateExpression { // TODO: Add the compression parameter - public Median(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The value that is greater than half of all values and less than half of all values.", + isAggregation = true + ) + public Median(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index 6fafbeae8e1f4..ddf0fd15fe2d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier; import 
org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,15 @@ public class MedianAbsoluteDeviation extends NumericAggregate { // TODO: Add parameter - public MedianAbsoluteDeviation(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The median absolute deviation, a measure of variability.", + isAggregation = true + ) + public MedianAbsoluteDeviation( + Source source, + @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 9625322fb72c8..22da614675f9e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.MinDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import 
org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,12 @@ public class Min extends NumericAggregate { - public Min(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The minimum value of a numeric field.", + isAggregation = true + ) + public Min(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 9620e112fbda7..c34783f7352c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.aggregation.PercentileDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.PercentileIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.PercentileLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,7 +27,16 @@ public class Percentile extends NumericAggregate { private final Expression percentile; - public Percentile(Source source, Expression field, Expression percentile) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "The value at which a certain percentage of observed values occur.", + isAggregation = true + ) + public Percentile( + Source source, + @Param(name = "field", type = { "double", 
"integer", "long", "unsigned_long" }) Expression field, + @Param(name = "percentile", type = { "double", "integer", "long" }) Expression percentile + ) { super(source, field, List.of(percentile)); this.percentile = percentile; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java new file mode 100644 index 0000000000000..6ce07a272711b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.ToAggregator; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import 
org.elasticsearch.xpack.ql.expression.function.aggregate.SpatialAggregateFunction; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; + +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatialPoint; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; + +/** + * Calculate spatial centroid of all geo_point or cartesian point values of a field in matching documents. + */ +public class SpatialCentroid extends SpatialAggregateFunction implements ToAggregator { + + @FunctionInfo(returnType = { "geo_point", "cartesian_point" }, description = "The centroid of a spatial field.", isAggregation = true) + public SpatialCentroid(Source source, @Param(name = "field", type = { "geo_point", "cartesian_point" }) Expression field) { + super(source, field, false); + } + + private SpatialCentroid(Source source, Expression field, boolean useDocValues) { + super(source, field, useDocValues); + } + + @Override + public SpatialCentroid withDocValues() { + return new SpatialCentroid(source(), field(), true); + } + + @Override + protected Expression.TypeResolution resolveType() { + // TODO: Support geo_shape and cartesian_shape + return isSpatialPoint(field(), sourceText(), DEFAULT); + } + + @Override + public DataType dataType() { + // We aggregate incoming GEO_POINTs into a single GEO_POINT, or incoming CARTESIAN_POINTs into a single CARTESIAN_POINT. 
+ return field().dataType(); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialCentroid::new, field()); + } + + @Override + public SpatialCentroid replaceChildren(List newChildren) { + return new SpatialCentroid(source(), newChildren.get(0)); + } + + @Override + public AggregatorFunctionSupplier supplier(List inputChannels) { + DataType type = field().dataType(); + if (useDocValues) { + // When the points are read as doc-values (eg. from the index), feed them into the doc-values aggregator + if (type == EsqlDataTypes.GEO_POINT) { + return new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); + } + if (type == EsqlDataTypes.CARTESIAN_POINT) { + return new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); + } + } else { + // When the points are read as WKB from source or as point literals, feed them into the source-values aggregator + if (type == EsqlDataTypes.GEO_POINT) { + return new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + } + if (type == EsqlDataTypes.CARTESIAN_POINT) { + return new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + } + } + throw EsqlIllegalArgumentException.illegalDataType(type); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 115e2f9759fa9..0acf18981a83d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -10,6 +10,8 @@ import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumIntAggregatorFunctionSupplier; import 
org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,7 +28,8 @@ */ public class Sum extends NumericAggregate { - public Sum(Source source, Expression field) { + @FunctionInfo(returnType = "long", description = "The sum of a numeric field.", isAggregation = true) + public Sum(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 0174eca9c1ddf..84fa57c8d636a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -17,6 +17,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; @@ -44,7 +46,44 @@ record Condition(Expression condition, Expression value) {} private final Expression elseValue; private DataType dataType; - public Case(Source source, Expression first, List rest) { + @FunctionInfo( + returnType = { + "boolean", + "cartesian_point", + "date", 
+ "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = """ + Accepts pairs of conditions and values. + The function returns the value that belongs to the first condition that evaluates to true.""" + ) + public Case( + Source source, + @Param(name = "condition", type = { "boolean" }) Expression first, + @Param( + name = "rest", + type = { + "boolean", + "cartesian_point", + "date", + "double", + "geo_point", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" } + ) List rest + ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); int conditionCount = children().size() / 2; conditions = new ArrayList<>(conditionCount); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 25477e501645d..84b442b4df699 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.ql.expression.Expression; @@ -37,6 +38,10 @@ public class Greatest extends ScalarFunction implements EvaluatorMapper, OptionalArgument { private DataType dataType; + @FunctionInfo( + returnType = { "integer", "long", "double", "boolean", 
"keyword", "text", "ip", "version" }, + description = "Returns the maximum value from many columns." + ) public Greatest( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, @@ -106,6 +111,8 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + // force datatype initialization + var dataType = dataType(); ExpressionEvaluator.Factory[] factories = children().stream() .map(e -> toEvaluator.apply(new MvMax(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index e7456b0871b7c..462c71098d169 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import org.elasticsearch.xpack.ql.expression.Expression; @@ -37,6 +38,10 @@ public class Least extends ScalarFunction implements EvaluatorMapper, OptionalArgument { private DataType dataType; + @FunctionInfo( + returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, + description = "Returns the minimum value from many columns." 
+ ) public Least( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, @@ -106,6 +111,9 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + // force datatype initialization + var dataType = dataType(); + ExpressionEvaluator.Factory[] factories = children().stream() .map(e -> toEvaluator.apply(new MvMin(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 3a33e086d8fdd..388ab970205ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -41,7 +41,7 @@ public class ToBoolean extends AbstractConvertFunction { Map.entry(INTEGER, ToBooleanFromIntEvaluator.Factory::new) ); - @FunctionInfo(returnType = "boolean") + @FunctionInfo(returnType = "boolean", description = "Converts an input value to a boolean value.") public ToBoolean( Source source, @Param(name = "v", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index 3b8bd582571f4..3756c322abc4e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -32,7 +32,7 @@ public class ToCartesianPoint extends AbstractConvertFunction { Map.entry(TEXT, ToCartesianPointFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "cartesian_point") + @FunctionInfo(returnType = "cartesian_point", description = "Converts an input value to a point value.") public ToCartesianPoint(Source source, @Param(name = "v", type = { "cartesian_point", "keyword", "text" }) Expression field) { super(source, field); } @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return CARTESIAN.stringAsWKB(in.utf8ToString()); + return CARTESIAN.wktToWkb(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java new file mode 100644 index 0000000000000..51294b7834fc4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; + +public class ToCartesianShape extends AbstractConvertFunction { + + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(CARTESIAN_POINT, (fieldEval, source) -> fieldEval), + Map.entry(CARTESIAN_SHAPE, (fieldEval, source) -> fieldEval), + Map.entry(KEYWORD, ToCartesianShapeFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToCartesianShapeFromStringEvaluator.Factory::new) + ); + + @FunctionInfo(returnType = "cartesian_shape", description = "Converts an input value to a shape value.") + public ToCartesianShape( + Source source, + @Param(name = "v", type = { "cartesian_point", "cartesian_shape", "keyword", "text" }) Expression field + ) { + super(source, field); + } + + @Override + protected Map factories() { + return EVALUATORS; + } + + @Override + public DataType dataType() { + return CARTESIAN_SHAPE; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToCartesianShape(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, 
ToCartesianShape::new, field()); + } + + @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) + static BytesRef fromKeyword(BytesRef in) { + return CARTESIAN.wktToWkb(in.utf8ToString()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index c2f621433ca21..1ff8bc39e36f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -40,7 +40,7 @@ public class ToDatetime extends AbstractConvertFunction { Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd ); - @FunctionInfo(returnType = "date") + @FunctionInfo(returnType = "date", description = "Converts an input value to a date value.") public ToDatetime( Source source, @Param(name = "v", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index c858bdbdb3993..c5e7b473f4e56 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -40,8 +40,8 @@ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMappe ) ); - @FunctionInfo(returnType = "double") - public ToDegrees(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", 
"integer" }) Expression field) { + @FunctionInfo(returnType = "double", description = "Converts a number in radians to degrees.") + public ToDegrees(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index 7711f55d667ba..6a984abdad50f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -42,7 +42,7 @@ public class ToDouble extends AbstractConvertFunction { Map.entry(INTEGER, ToDoubleFromIntEvaluator.Factory::new) // CastIntToDoubleEvaluator would be a candidate, but not MV'd ); - @FunctionInfo(returnType = "double") + @FunctionInfo(returnType = "double", description = "Converts an input value to a double value.") public ToDouble( Source source, @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index ab265dad6a477..16ea1235ccf59 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -32,7 +32,7 @@ public class ToGeoPoint extends AbstractConvertFunction { Map.entry(TEXT, ToGeoPointFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "geo_point") + 
@FunctionInfo(returnType = "geo_point", description = "Converts an input value to a geo_point value.") public ToGeoPoint(Source source, @Param(name = "v", type = { "geo_point", "keyword", "text" }) Expression field) { super(source, field); } @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return GEO.stringAsWKB(in.utf8ToString()); + return GEO.wktToWkb(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java new file mode 100644 index 0000000000000..3a5d5f2012df6 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + +public class ToGeoShape extends AbstractConvertFunction { + + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(GEO_POINT, (fieldEval, source) -> fieldEval), + Map.entry(GEO_SHAPE, (fieldEval, source) -> fieldEval), + Map.entry(KEYWORD, ToGeoShapeFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToGeoShapeFromStringEvaluator.Factory::new) + ); + + @FunctionInfo(returnType = "geo_shape", description = "Converts an input value to a geo_shape value.") + public ToGeoShape(Source source, @Param(name = "v", type = { "geo_point", "geo_shape", "keyword", "text" }) Expression field) { + super(source, field); + } + + @Override + protected Map factories() { + return EVALUATORS; + } + + @Override + public DataType dataType() { + return GEO_SHAPE; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToGeoShape(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToGeoShape::new, field()); + } + + @ConvertEvaluator(extraName = "FromString", warnExceptions = { 
IllegalArgumentException.class }) + static BytesRef fromKeyword(BytesRef in) { + return GEO.wktToWkb(in.utf8ToString()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index 97512a03fe2ec..fc6a5f5c69afa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -32,7 +32,7 @@ public class ToIP extends AbstractConvertFunction { Map.entry(TEXT, ToIPFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "ip") + @FunctionInfo(returnType = "ip", description = "Converts an input string to an IP value.") public ToIP(Source source, @Param(name = "v", type = { "ip", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index a8e4ef804a2ba..2288ddcc33a55 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -44,7 +44,7 @@ public class ToInteger extends AbstractConvertFunction { Map.entry(LONG, ToIntegerFromLongEvaluator.Factory::new) ); - @FunctionInfo(returnType = "integer") + @FunctionInfo(returnType = "integer", description = "Converts an input value to an integer value.") public ToInteger( Source source, @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index 06f56e81fc50d..ee7658b07b7f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -20,8 +20,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; @@ -33,16 +31,12 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToLong extends AbstractConvertFunction { private static final Map EVALUATORS = Map.ofEntries( Map.entry(LONG, (fieldEval, source) -> fieldEval), Map.entry(DATETIME, (fieldEval, source) -> fieldEval), - Map.entry(GEO_POINT, ToLongFromGeoPointEvaluator.Factory::new), - Map.entry(CARTESIAN_POINT, ToLongFromCartesianPointEvaluator.Factory::new), Map.entry(BOOLEAN, ToLongFromBooleanEvaluator.Factory::new), Map.entry(KEYWORD, ToLongFromStringEvaluator.Factory::new), Map.entry(TEXT, ToLongFromStringEvaluator.Factory::new), @@ -51,13 +45,10 @@ public class ToLong extends AbstractConvertFunction { Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // 
CastIntToLongEvaluator would be a candidate, but not MV'd ); - @FunctionInfo(returnType = "long") + @FunctionInfo(returnType = "long", description = "Converts an input value to a long value.") public ToLong( Source source, - @Param( - name = "v", - type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer", "geo_point", "cartesian_point" } - ) Expression field + @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field ) { super(source, field); } @@ -115,14 +106,4 @@ static long fromUnsignedLong(long ul) { static long fromInt(int i) { return i; } - - @ConvertEvaluator(extraName = "FromGeoPoint") - static long fromGeoPoint(BytesRef wkb) { - return GEO.wkbAsLong(wkb); - } - - @ConvertEvaluator(extraName = "FromCartesianPoint") - static long fromCartesianPoint(BytesRef wkb) { - return CARTESIAN.wkbAsLong(wkb); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index a1d2e1381109d..ac31cf3759ad9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -39,8 +39,8 @@ public class ToRadians extends AbstractConvertFunction implements EvaluatorMappe ) ); - @FunctionInfo(returnType = "double") - public ToRadians(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", "integer" }) Expression field) { + @FunctionInfo(returnType = "double", description = "Converts a number in degrees to radians.") + public ToRadians(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 26baac4f8bcb6..688996dd1db00 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -23,7 +23,9 @@ import java.util.Map; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -53,27 +55,31 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper Map.entry(VERSION, ToStringFromVersionEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToStringFromUnsignedLongEvaluator.Factory::new), Map.entry(GEO_POINT, ToStringFromGeoPointEvaluator.Factory::new), - Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new) + Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new), + Map.entry(CARTESIAN_SHAPE, ToStringFromCartesianShapeEvaluator.Factory::new), + Map.entry(GEO_SHAPE, ToStringFromGeoShapeEvaluator.Factory::new) ); - @FunctionInfo(returnType = "keyword") + @FunctionInfo(returnType = "keyword", description = "Converts a field into a string.") public ToString( Source source, @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "cartesian_shape", + "date", "double", - "ip", - "text", + "geo_point", + "geo_shape", 
"integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression v ) { super(source, v); @@ -141,11 +147,21 @@ static BytesRef fromUnsignedLong(long lng) { @ConvertEvaluator(extraName = "FromGeoPoint") static BytesRef fromGeoPoint(BytesRef wkb) { - return new BytesRef(GEO.wkbAsString(wkb)); + return new BytesRef(GEO.wkbToWkt(wkb)); } @ConvertEvaluator(extraName = "FromCartesianPoint") static BytesRef fromCartesianPoint(BytesRef wkb) { - return new BytesRef(CARTESIAN.wkbAsString(wkb)); + return new BytesRef(CARTESIAN.wkbToWkt(wkb)); + } + + @ConvertEvaluator(extraName = "FromCartesianShape") + static BytesRef fromCartesianShape(BytesRef wkb) { + return new BytesRef(CARTESIAN.wkbToWkt(wkb)); + } + + @ConvertEvaluator(extraName = "FromGeoShape") + static BytesRef fromGeoShape(BytesRef wkb) { + return new BytesRef(GEO.wkbToWkt(wkb)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 651259db06054..656d99ee8ab80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -46,7 +46,7 @@ public class ToUnsignedLong extends AbstractConvertFunction { Map.entry(INTEGER, ToUnsignedLongFromIntEvaluator.Factory::new) ); - @FunctionInfo(returnType = "unsigned_long") + @FunctionInfo(returnType = "unsigned_long", description = "Converts an input value to an unsigned long value.") public ToUnsignedLong( Source source, @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index 34e8f695b23c3..e196a91e3bac2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -32,7 +32,7 @@ public class ToVersion extends AbstractConvertFunction { Map.entry(TEXT, ToVersionFromStringEvaluator.Factory::new) ); - @FunctionInfo(returnType = "version") + @FunctionInfo(returnType = "version", description = "Converts an input string to a version value.") public ToVersion(Source source, @Param(name = "v", type = { "keyword", "text", "version" }) Expression v) { super(source, v); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index eadea746a1bd1..348d4a66479f6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -38,7 +40,21 @@ public class DateExtract extends 
ConfigurationFunction implements EvaluatorMappe private ChronoField chronoField; - public DateExtract(Source source, Expression chronoFieldExp, Expression field, Configuration configuration) { + @FunctionInfo(returnType = "long", description = "Extracts parts of a date, like year, month, day, hour.") + public DateExtract( + Source source, + // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser + // used in the CSVTests and fixing it is not trivial + @Param(name = "date_part", type = { "keyword" }, description = """ + Part of the date to extract. + Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; + aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; + day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; + milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; + offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.""") Expression chronoFieldExp, + @Param(name = "field", type = "date", description = "Date expression") Expression field, + Configuration configuration + ) { super(source, List.of(chronoFieldExp, field), configuration); } @@ -58,6 +74,8 @@ public ExpressionEvaluator.Factory toEvaluator(Function matches; - public CIDRMatch(Source source, Expression ipField, List matches) { + @FunctionInfo(returnType = "boolean", description = "Returns true if the provided IP is contained in one of the provided CIDR blocks.") + public CIDRMatch( + Source source, + @Param(name = "ip", type = { "ip" }) Expression ipField, + @Param(name = "blockX", type = { "keyword" }, description = "CIDR block to test the IP against.") List matches + ) { super(source, CollectionUtils.combine(singletonList(ipField), matches)); this.ipField = ipField; this.matches = matches; diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 8bc3ba3b184e9..7a2a2a5d05683 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -23,8 +23,8 @@ import java.util.function.Function; public class Abs extends UnaryScalarFunction implements EvaluatorMapper { - @FunctionInfo(returnType = { "integer", "long", "double", "unsigned_long" }) - public Abs(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Returns the absolute value.") + public Abs(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index 5df73102a5ee6..603ef86af6c64 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -21,8 +21,8 @@ * Inverse cosine trigonometric function. 
*/ public class Acos extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double") - public Acos(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "The arccosine of an angle, expressed in radians.") + public Acos(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index 66d35d8e8bb2c..f66409921ad2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -21,8 +21,8 @@ * Inverse cosine trigonometric function. 
*/ public class Asin extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double") - public Asin(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Inverse sine trigonometric function.") + public Asin(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index 88079e60fa66a..8f0ad96f96e8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -21,8 +21,8 @@ * Inverse cosine trigonometric function. */ public class Atan extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double") - public Atan(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Inverse tangent trigonometric function.") + public Atan(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index e754aff1853b3..eca3b236abb8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -34,11 +34,14 @@ public class Atan2 extends 
ScalarFunction implements EvaluatorMapper { private final Expression y; private final Expression x; - @FunctionInfo(returnType = "double") + @FunctionInfo( + returnType = "double", + description = "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." + ) public Atan2( Source source, - @Param(name = "y", type = { "integer", "long", "double", "unsigned_long" }) Expression y, - @Param(name = "x", type = { "integer", "long", "double", "unsigned_long" }) Expression x + @Param(name = "y", type = { "double", "integer", "long", "unsigned_long" }) Expression y, + @Param(name = "x", type = { "double", "integer", "long", "unsigned_long" }) Expression x ) { super(source, List.of(y, x)); this.y = y; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 27abeb44b2ff0..6a8b3f41a9c65 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Foldables; import 
org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -40,7 +41,6 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isFoldable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** @@ -83,13 +83,15 @@ public class AutoBucket extends ScalarFunction implements EvaluatorMapper { private final Expression from; private final Expression to; - @FunctionInfo(returnType = { "double", "date" }) + @FunctionInfo(returnType = { "double", "date" }, description = """ + Creates human-friendly buckets and returns a datetime value + for each row that corresponds to the resulting bucket the row falls into.""") public AutoBucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, @Param(name = "buckets", type = { "integer" }) Expression buckets, - @Param(name = "from", type = { "integer", "long", "double", "date" }) Expression from, - @Param(name = "to", type = { "integer", "long", "double", "date" }) Expression to + @Param(name = "from", type = { "integer", "long", "double", "date", "string" }) Expression from, + @Param(name = "to", type = { "integer", "long", "double", "date", "string" }) Expression to ) { super(source, List.of(field, buckets, from, to)); this.field = field; @@ -113,8 +115,8 @@ public ExpressionEvaluator.Factory toEvaluator(Function isString(e, sourceText(), o)); + return resolveType((e, o) -> isStringOrDate(e, sourceText(), o)); } if (field.dataType().isNumeric()) { return resolveType((e, o) -> isNumeric(e, sourceText(), o)); @@ -214,6 +216,24 @@ private TypeResolution resolveType(BiFunction 
DataTypes.isString(exp) || DataTypes.isDateTime(exp), + operationName, + paramOrd, + "datetime", + "string" + ); + } + + private long foldToLong(Expression e) { + Object value = Foldables.valueOf(e); + return DataTypes.isDateTime(e.dataType()) + ? ((Number) value).longValue() + : DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(BytesRefs.toString(value)); + } + @Override public DataType dataType() { if (field.dataType().isNumeric()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 85e5489bd74a0..cafc3d4df7613 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -30,7 +31,8 @@ *

    */ public class Ceil extends UnaryScalarFunction implements EvaluatorMapper { - public Ceil(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Round a number up to the nearest integer.") + public Ceil(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index 5f8661bb0ae7d..0ecc0381636ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -24,7 +24,7 @@ public class Cos extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric cosine of an angle") public Cos( Source source, - @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 6cc49cec0c32d..78d982acc7bb6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -26,7 +26,7 @@ public Cosh( Source source, @Param( name = "n", - 
type = { "integer", "long", "double", "unsigned_long" }, + type = { "double", "integer", "long", "unsigned_long" }, description = "The number who's hyperbolic cosine is to be returned" ) Expression n ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java index d2900062f7875..3497a945f1562 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; @@ -16,6 +17,7 @@ * Function that emits Euler's number. */ public class E extends DoubleConstantFunction { + @FunctionInfo(returnType = "double", description = "Euler’s number.") public E(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index a0157105a9b82..b64be370eaded 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; 
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -30,7 +31,11 @@ *

    */ public class Floor extends UnaryScalarFunction implements EvaluatorMapper { - public Floor(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Round a number down to the nearest integer." + ) + public Floor(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 84bc9d19b409e..bca7c158ba366 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -27,7 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Log10 extends UnaryScalarFunction implements EvaluatorMapper { - public Log10(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Returns the log base 10.") + public Log10(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { 
super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java index bd36be56b356c..a58bffd1dcbad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; @@ -16,6 +17,8 @@ * Function that emits pi. */ public class Pi extends DoubleConstantFunction { + + @FunctionInfo(returnType = "double", description = "The ratio of a circle’s circumference to its diameter.") public Pi(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 57f32cf2212d3..5ff5a7019dcdb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -35,10 +36,11 @@ public class 
Pow extends ScalarFunction implements OptionalArgument, EvaluatorMa private final Expression base, exponent; private final DataType dataType; + @FunctionInfo(returnType = "double", description = "Returns the value of a base raised to the power of an exponent.") public Pow( Source source, - @Param(name = "base", type = { "integer", "unsigned_long", "long", "double" }) Expression base, - @Param(name = "exponent", type = { "integer", "unsigned_long", "long", "double" }) Expression exponent + @Param(name = "base", type = { "double", "integer", "long", "unsigned_long" }) Expression base, + @Param(name = "exponent", type = { "double", "integer", "long", "unsigned_long" }) Expression exponent ) { super(source, Arrays.asList(base, exponent)); this.base = base; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 3cbc74b3b6c28..3d62fcc7e044d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -43,7 +45,18 @@ public class Round extends ScalarFunction implements OptionalArgument, Evaluator private final Expression field, decimals; - public Round(Source source, 
Expression field, Expression decimals) { + // @TODO: add support for "integer", "long", "unsigned_long" once tests are fixed + @FunctionInfo(returnType = "double", description = "Rounds a number to the closest number with the specified number of digits.") + public Round( + Source source, + @Param(name = "value", type = "double", description = "The numeric value to round") Expression field, + @Param( + optional = true, + name = "decimals", + type = { "integer" }, + description = "The number of decimal places to round to. Defaults to 0." + ) Expression decimals + ) { super(source, decimals != null ? Arrays.asList(field, decimals) : Arrays.asList(field)); this.field = field; this.decimals = decimals; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index 7487d8df90395..b3f204cfc09c8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -25,7 +25,7 @@ public class Sin extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric sine of an angle") public Sin( Source source, - @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index 4b2adef5a2d6f..25221043f297d 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -26,7 +26,7 @@ public Sinh( Source source, @Param( name = "n", - type = { "integer", "long", "double", "unsigned_long" }, + type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic sine of" ) Expression n ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index bdaf3a9498b09..c3f9855fdc4ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -27,7 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Sqrt extends UnaryScalarFunction implements EvaluatorMapper { - public Sqrt(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { + @FunctionInfo(returnType = "double", description = "Returns the square root of a number.") + public Sqrt(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 5596c9098c034..528a0ae0a0e71 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -24,7 +24,7 @@ public class Tan extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric tangent of an angle") public Tan( Source source, - @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index ce59cec50bcca..c77bbaedf91b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -26,7 +26,7 @@ public Tanh( Source source, @Param( name = "n", - type = { "integer", "long", "double", "unsigned_long" }, + type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic tangent of" ) Expression n ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java index e40d979886d0c..fd51d9f611d41 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; @@ -18,6 +19,7 @@ public class Tau extends DoubleConstantFunction { public static final double TAU = Math.PI * 2; + @FunctionInfo(returnType = "double", description = "The ratio of a circle’s circumference to its radius.") public Tau(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 296229bab9b5a..5e9a4e2a75878 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -29,7 +31,11 @@ * Reduce a multivalued field to a single valued field containing the average value. 
*/ public class MvAvg extends AbstractMultivalueFunction { - public MvAvg(Source source, Expression field) { + @FunctionInfo( + returnType = "double", + description = "Converts a multivalued field into a single valued field containing the average of all of the values." + ) + public MvAvg(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 9e4482bd48682..4fa89e66982e4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -37,18 +37,20 @@ public MvCount( @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "cartesian_shape", + "date", "double", - "ip", - "text", + "geo_point", + "geo_shape", "integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression v ) { super(source, v); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index 7d9b40ad0d24f..dc5fa0036f789 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -25,13 +25,14 @@ * Removes duplicate values from a multivalued field. 
*/ public class MvDedupe extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Remove duplicate values from a multivalued field.") + // @TODO: add cartesian_point, geo_point, unsigned_long + @FunctionInfo( + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Remove duplicate values from a multivalued field." + ) public MvDedupe( Source source, - @Param( - name = "v", - type = { "boolean", "date", "double", "ip", "text", "integer", "keyword", "version", "long" } // TODO add unsigned_long - ) Expression field + @Param(name = "v", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java index 1acb135292995..0f6bd847d68ed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -33,24 +33,43 @@ * Reduce a multivalued field to a single valued field containing the minimum value. */ public class MvFirst extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the first value.") + @FunctionInfo( + returnType = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Reduce a multivalued field to a single valued field containing the first value." 
+ ) public MvFirst( Source source, @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "cartesian_shape", + "date", "double", - "ip", - "text", + "geo_point", + "geo_shape", "integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression field ) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java index 2e6066a6dc98c..2881854d17f6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -33,24 +33,43 @@ * Reduce a multivalued field to a single valued field containing the minimum value. */ public class MvLast extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the last value.") + @FunctionInfo( + returnType = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "unsigned_long", + "version" }, + description = "Reduce a multivalued field to a single valued field containing the last value." 
+ ) public MvLast( Source source, @Param( name = "v", type = { - "unsigned_long", - "date", "boolean", + "cartesian_point", + "cartesian_shape", + "date", "double", - "ip", - "text", + "geo_point", + "geo_shape", "integer", + "ip", "keyword", - "version", "long", - "geo_point", - "cartesian_point" } + "text", + "unsigned_long", + "version" } ) Expression field ) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index fafd8d6a584fa..0b08b99ca0687 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -29,12 +29,15 @@ * Reduce a multivalued field to a single valued field containing the maximum value. */ public class MvMax extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the maximum value.") + @FunctionInfo( + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + description = "Reduce a multivalued field to a single valued field containing the maximum value." 
+ ) public MvMax( Source source, @Param( name = "v", - type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression v ) { super(source, v); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index b60885967264c..66a8ec13b4475 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -14,6 +14,8 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -33,7 +35,11 @@ * Reduce a multivalued field to a single valued field containing the average value. */ public class MvMedian extends AbstractMultivalueFunction { - public MvMedian(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Converts a multivalued field into a single valued field containing the median value." 
+ ) + public MvMedian(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index 1ae2ef41191b5..91a48e539042f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -29,12 +29,15 @@ * Reduce a multivalued field to a single valued field containing the minimum value. */ public class MvMin extends AbstractMultivalueFunction { - @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the minimum value.") + @FunctionInfo( + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + description = "Reduce a multivalued field to a single valued field containing the minimum value." 
+ ) public MvMin( Source source, @Param( name = "v", - type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression field ) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index 48b83aa205549..96d7ad905c8d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -28,7 +30,11 @@ * Reduce a multivalued field to a single valued field containing the sum of all values. */ public class MvSum extends AbstractMultivalueFunction { - public MvSum(Source source, Expression field) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Converts a multivalued field into a single valued field containing the sum of all of the values." 
+ ) + public MvSum(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 43d4fff9c486d..4efbb6a306366 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -16,6 +16,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -41,7 +43,23 @@ public class Coalesce extends ScalarFunction implements EvaluatorMapper, OptionalArgument { private DataType dataType; - public Coalesce(Source source, Expression first, List rest) { + @FunctionInfo( + returnType = { "boolean", "text", "integer", "keyword", "long" }, + description = "Returns the first of its arguments that is not null." 
+ ) + public Coalesce( + Source source, + @Param( + name = "expression", + type = { "boolean", "text", "integer", "keyword", "long" }, + description = "Expression to evaluate" + ) Expression first, + @Param( + name = "expressionX", + type = { "boolean", "text", "integer", "keyword", "long" }, + description = "Other expression to evaluate" + ) List rest + ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 61f0dac6a9bbd..a3784bd0c8579 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -14,6 +14,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -38,7 +40,12 @@ public class Concat extends ScalarFunction implements EvaluatorMapper { static final long MAX_CONCAT_LENGTH = MB.toBytes(1); - public Concat(Source source, Expression first, List rest) { + @FunctionInfo(returnType = "keyword", description = "Concatenates two or more strings.") + public Concat( + Source source, + @Param(name = "first", type = { "keyword", "text" }) Expression first, + @Param(name = "rest", type = { "keyword", "text" }) List rest + ) { super(source, 
Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 250cbfad69b39..8db1d10b3d42c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -32,7 +34,15 @@ public class EndsWith extends ScalarFunction implements EvaluatorMapper { private final Expression str; private final Expression suffix; - public EndsWith(Source source, Expression str, Expression suffix) { + @FunctionInfo( + returnType = "boolean", + description = "Returns a boolean that indicates whether a keyword string ends with another string" + ) + public EndsWith( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "suffix", type = { "keyword", "text" }) Expression suffix + ) { super(source, Arrays.asList(str, suffix)); this.str = str; this.suffix = suffix; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 
65d3a6388f790..70e11e69c1b70 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -42,9 +43,13 @@ public class Left extends ScalarFunction implements EvaluatorMapper { private final Expression length; + @FunctionInfo( + returnType = "keyword", + description = "Return the substring that extracts length chars from the string starting from the left." 
+ ) public Left( Source source, - @Param(name = "string", type = { "keyword" }) Expression str, + @Param(name = "str", type = { "keyword", "text" }) Expression str, @Param(name = "length", type = { "integer" }) Expression length ) { super(source, Arrays.asList(str, length)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index 9f944c62af6a3..e3ea802981273 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -27,7 +29,8 @@ public class Length extends UnaryScalarFunction implements EvaluatorMapper { - public Length(Source source, Expression field) { + @FunctionInfo(returnType = "integer", description = "Returns the character length of a string.") + public Length(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index 0ed4bd0fe7d02..c0468569216a5 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -37,7 +39,16 @@ public class Replace extends ScalarFunction implements EvaluatorMapper { private final Expression newStr; private final Expression regex; - public Replace(Source source, Expression str, Expression regex, Expression newStr) { + @FunctionInfo( + returnType = "keyword", + description = "The function substitutes in the string any match of the regular expression with the replacement string." 
+ ) + public Replace( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "regex", type = { "keyword", "text" }) Expression regex, + @Param(name = "newStr", type = { "keyword", "text" }) Expression newStr + ) { super(source, Arrays.asList(str, regex, newStr)); this.str = str; this.regex = regex; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index 7e96f7a396472..4d94591a007b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -42,9 +43,13 @@ public class Right extends ScalarFunction implements EvaluatorMapper { private final Expression length; + @FunctionInfo( + returnType = "keyword", + description = "Return the substring that extracts length chars from the string starting from the right." 
+ ) public Right( Source source, - @Param(name = "string", type = { "keyword" }) Expression str, + @Param(name = "str", type = { "keyword", "text" }) Expression str, @Param(name = "length", type = { "integer" }) Expression length ) { super(source, Arrays.asList(str, length)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 7e6b3659bbdf0..66d50aa4df061 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction; @@ -32,7 +34,12 @@ * Splits a string on some delimiter into a multivalued string field. 
*/ public class Split extends BinaryScalarFunction implements EvaluatorMapper { - public Split(Source source, Expression str, Expression delim) { + @FunctionInfo(returnType = "keyword", description = "Split a single valued string into multiple strings.") + public Split( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "delim", type = { "keyword", "text" }) Expression delim + ) { super(source, str, delim); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index d78ad3df64d1f..0acda09e7bcb1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; @@ -32,7 +34,15 @@ public class StartsWith extends ScalarFunction implements EvaluatorMapper { private final Expression str; private final Expression prefix; - public StartsWith(Source source, Expression str, Expression prefix) { + @FunctionInfo( + returnType = "boolean", + description = "Returns a boolean that indicates whether a keyword string starts with another string" + ) + public StartsWith( + Source source, + @Param(name = "str", type = { "keyword", 
"text" }) Expression str, + @Param(name = "prefix", type = { "keyword", "text" }) Expression prefix + ) { super(source, Arrays.asList(str, prefix)); this.str = str; this.prefix = prefix; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index c287aeafc8d80..d3e5ab53f9e53 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -35,7 +37,16 @@ public class Substring extends ScalarFunction implements OptionalArgument, Evalu private final Expression str, start, length; - public Substring(Source source, Expression str, Expression start, Expression length) { + @FunctionInfo( + returnType = "keyword", + description = "Returns a substring of a string, specified by a start position and an optional length" + ) + public Substring( + Source source, + @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "start", type = { "integer" }) Expression start, + @Param(optional = true, name = "length", type = { "integer" }) Expression length + ) { super(source, length == null ? 
Arrays.asList(str, start) : Arrays.asList(str, start, length)); this.str = str; this.start = start; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java new file mode 100644 index 0000000000000..c79a5f3e051ec --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.scalar.ConfigurationFunction; +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.ql.session.Configuration; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Locale; +import java.util.function.Function; + +import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; + +public class ToLower extends ConfigurationFunction implements EvaluatorMapper { + + private final Expression field; + + @FunctionInfo( + returnType = { "keyword", "text" }, + description = "Returns a new string representing the input string converted to lower case." + ) + public ToLower( + Source source, + @Param(name = "str", type = { "keyword", "text" }, description = "The input string") Expression field, + Configuration configuration + ) { + super(source, List.of(field), configuration); + this.field = field; + } + + @Override + public DataType dataType() { + return field.dataType(); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + return isString(field, sourceText(), DEFAULT); + } + + @Override + public boolean foldable() { + return field.foldable(); + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Evaluator + static BytesRef process(BytesRef val, @Fixed Locale locale) { + return BytesRefs.toBytesRef(val.utf8ToString().toLowerCase(locale)); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var fieldEvaluator = toEvaluator.apply(field); + return new ToLowerEvaluator.Factory(source(), fieldEvaluator, ((EsqlConfiguration) configuration()).locale()); + } + + public Expression field() { + return field; + } + + public ToLower replaceChild(Expression child) { + return new ToLower(source(), child, configuration()); + } + + @Override + public Expression replaceChildren(List newChildren) { + assert newChildren.size() == 1; + return replaceChild(newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToLower::new, field, configuration()); + } + + @Override + public ScriptTemplate 
asScript() { + throw new UnsupportedOperationException("functions do not support scripting"); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java new file mode 100644 index 0000000000000..7fc54947c0ce8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.scalar.ConfigurationFunction; +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.ql.session.Configuration; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Locale; +import java.util.function.Function; + +import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; + +public class ToUpper extends ConfigurationFunction implements EvaluatorMapper { + + private final Expression field; + + @FunctionInfo( + returnType = { "keyword", "text" }, + description = "Returns a new string representing the input string converted to upper case." + ) + public ToUpper( + Source source, + @Param(name = "str", type = { "keyword", "text" }, description = "The input string") Expression field, + Configuration configuration + ) { + super(source, List.of(field), configuration); + this.field = field; + } + + @Override + public DataType dataType() { + return field.dataType(); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + return isString(field, sourceText(), DEFAULT); + } + + @Override + public boolean foldable() { + return field.foldable(); + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Evaluator + static BytesRef process(BytesRef val, @Fixed Locale locale) { + return BytesRefs.toBytesRef(val.utf8ToString().toUpperCase(locale)); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var fieldEvaluator = toEvaluator.apply(field); + return new ToUpperEvaluator.Factory(source(), fieldEvaluator, ((EsqlConfiguration) configuration()).locale()); + } + + public Expression field() { + return field; + } + + public ToUpper replaceChild(Expression child) { + return new ToUpper(source(), child, configuration()); + } + + @Override + public Expression replaceChildren(List newChildren) { + assert newChildren.size() == 1; + return replaceChild(newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToUpper::new, field, configuration()); + } + + @Override + public ScriptTemplate 
asScript() { + throw new UnsupportedOperationException("functions do not support scripting"); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 3a2f8797103aa..277b101c53fe7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -12,13 +12,17 @@ import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; @@ -33,6 +37,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; @@ -40,10 +45,12 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -102,6 +109,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; @@ -165,7 +174,6 @@ import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.ql.index.EsIndex; -import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; @@ -296,6 +304,13 @@ public static List namedTypeEntries() { of(BinaryComparison.class, GreaterThanOrEqual.class, PlanNamedTypes::writeBinComparison, PlanNamedTypes::readBinComparison), of(BinaryComparison.class, LessThan.class, PlanNamedTypes::writeBinComparison, PlanNamedTypes::readBinComparison), of(BinaryComparison.class, LessThanOrEqual.class, PlanNamedTypes::writeBinComparison, PlanNamedTypes::readBinComparison), + // InsensitiveEquals + of( + InsensitiveEquals.class, + InsensitiveEquals.class, + PlanNamedTypes::writeInsensitiveEquals, + PlanNamedTypes::readInsensitiveEquals + ), // InComparison of(ScalarFunction.class, In.class, PlanNamedTypes::writeInComparison, PlanNamedTypes::readInComparison), // RegexMatch @@ -331,6 +346,8 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, ToDatetime.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDegrees.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDouble.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, ToGeoShape.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, ToCartesianShape.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToGeoPoint.class, PlanNamedTypes::writeESQLUnaryScalar, 
PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToIP.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToInteger.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), @@ -367,6 +384,8 @@ public static List namedTypeEntries() { of(ScalarFunction.class, Split.class, PlanNamedTypes::writeSplit, PlanNamedTypes::readSplit), of(ScalarFunction.class, Tau.class, PlanNamedTypes::writeNoArgScalar, PlanNamedTypes::readNoArgScalar), of(ScalarFunction.class, Replace.class, PlanNamedTypes::writeReplace, PlanNamedTypes::readReplace), + of(ScalarFunction.class, ToLower.class, PlanNamedTypes::writeToLower, PlanNamedTypes::readToLower), + of(ScalarFunction.class, ToUpper.class, PlanNamedTypes::writeToUpper, PlanNamedTypes::readToUpper), // ArithmeticOperations of(ArithmeticOperation.class, Add.class, PlanNamedTypes::writeArithmeticOperation, PlanNamedTypes::readArithmeticOperation), of(ArithmeticOperation.class, Sub.class, PlanNamedTypes::writeArithmeticOperation, PlanNamedTypes::readArithmeticOperation), @@ -382,6 +401,7 @@ public static List namedTypeEntries() { of(AggregateFunction.class, Median.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), of(AggregateFunction.class, MedianAbsoluteDeviation.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), of(AggregateFunction.class, Percentile.class, PlanNamedTypes::writePercentile, PlanNamedTypes::readPercentile), + of(AggregateFunction.class, SpatialCentroid.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), of(AggregateFunction.class, Sum.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), // Multivalue functions of(ScalarFunction.class, MvAvg.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), @@ -478,15 +498,25 @@ static void writeEvalExec(PlanStreamOutput out, EvalExec evalExec) throws IOExce } static EnrichExec 
readEnrichExec(PlanStreamInput in) throws IOException { - return new EnrichExec( - in.readSource(), - in.readPhysicalPlanNode(), - in.readNamedExpression(), - in.readString(), - in.readString(), - readEsIndex(in), - readNamedExpressions(in) - ); + final Source source = in.readSource(); + final PhysicalPlan child = in.readPhysicalPlanNode(); + final NamedExpression matchField = in.readNamedExpression(); + final String policyName = in.readString(); + final String policyMatchField = in.readString(); + final Map concreteIndices; + final Enrich.Mode mode; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + mode = in.readEnum(Enrich.Mode.class); + concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); + } else { + mode = Enrich.Mode.ANY; + EsIndex esIndex = readEsIndex(in); + if (esIndex.concreteIndices().size() != 1) { + throw new IllegalStateException("expected a single concrete enrich index; got " + esIndex.concreteIndices()); + } + concreteIndices = Map.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); + } + return new EnrichExec(source, child, mode, matchField, policyName, policyMatchField, concreteIndices, readNamedExpressions(in)); } static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOException { @@ -495,7 +525,17 @@ static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOEx out.writeNamedExpression(enrich.matchField()); out.writeString(enrich.policyName()); out.writeString(enrich.policyMatchField()); - writeEsIndex(out, enrich.enrichIndex()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + out.writeEnum(enrich.mode()); + out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); + } else { + if (enrich.concreteIndices().keySet().equals(Set.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY))) { + String concreteIndex = 
enrich.concreteIndices().get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + writeEsIndex(out, new EsIndex(concreteIndex, Map.of(), Set.of(concreteIndex))); + } else { + throw new IllegalStateException("expected a single concrete enrich index; got " + enrich.concreteIndices()); + } + } writeNamedExpressions(out, enrich.enrichFields()); } @@ -721,24 +761,56 @@ static void writeEval(PlanStreamOutput out, Eval eval) throws IOException { } static Enrich readEnrich(PlanStreamInput in) throws IOException { - return new Enrich( - in.readSource(), - in.readLogicalPlanNode(), - in.readExpression(), - in.readNamedExpression(), - new EnrichPolicyResolution(in.readString(), new EnrichPolicy(in), IndexResolution.valid(readEsIndex(in))), - readNamedExpressions(in) - ); + Enrich.Mode mode = Enrich.Mode.ANY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + mode = in.readEnum(Enrich.Mode.class); + } + final Source source = in.readSource(); + final LogicalPlan child = in.readLogicalPlanNode(); + final Expression policyName = in.readExpression(); + final NamedExpression matchField = in.readNamedExpression(); + if (in.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + in.readString(); // discard the old policy name + } + final EnrichPolicy policy = new EnrichPolicy(in); + final Map concreteIndices; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); + } else { + EsIndex esIndex = readEsIndex(in); + if (esIndex.concreteIndices().size() > 1) { + throw new IllegalStateException("expected a single enrich index; got " + esIndex); + } + concreteIndices = Map.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); + } + return new Enrich(source, child, mode, policyName, matchField, policy, concreteIndices, readNamedExpressions(in)); } static void 
writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + out.writeEnum(enrich.mode()); + } + out.writeNoSource(); out.writeLogicalPlanNode(enrich.child()); out.writeExpression(enrich.policyName()); out.writeNamedExpression(enrich.matchField()); - out.writeString(enrich.policy().policyName()); - enrich.policy().policy().writeTo(out); - writeEsIndex(out, enrich.policy().index().get()); + if (out.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + out.writeString(BytesRefs.toString(enrich.policyName().fold())); // old policy name + } + enrich.policy().writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); + } else { + Map concreteIndices = enrich.concreteIndices(); + if (concreteIndices.keySet().equals(Set.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY))) { + String enrichIndex = concreteIndices.get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + EsIndex esIndex = new EsIndex(enrichIndex, Map.of(), Set.of(enrichIndex)); + writeEsIndex(out, esIndex); + } else { + throw new IllegalStateException("expected a single enrich index; got " + concreteIndices); + } + } writeNamedExpressions(out, enrich.enrichFields()); } @@ -1087,6 +1159,20 @@ static void writeBinComparison(PlanStreamOutput out, BinaryComparison binaryComp out.writeOptionalZoneId(binaryComparison.zoneId()); } + // -- InsensitiveEquals + static InsensitiveEquals readInsensitiveEquals(PlanStreamInput in, String name) throws IOException { + var source = in.readSource(); + var left = in.readExpression(); + var right = in.readExpression(); + return new InsensitiveEquals(source, left, right); + } + + static void writeInsensitiveEquals(PlanStreamOutput out, InsensitiveEquals eq) throws IOException { + out.writeSource(eq.source()); + 
out.writeExpression(eq.left()); + out.writeExpression(eq.right()); + } + // -- InComparison static In readInComparison(PlanStreamInput in) throws IOException { @@ -1167,6 +1253,8 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro entry(name(ToDatetime.class), ToDatetime::new), entry(name(ToDegrees.class), ToDegrees::new), entry(name(ToDouble.class), ToDouble::new), + entry(name(ToGeoShape.class), ToGeoShape::new), + entry(name(ToCartesianShape.class), ToCartesianShape::new), entry(name(ToGeoPoint.class), ToGeoPoint::new), entry(name(ToIP.class), ToIP::new), entry(name(ToInteger.class), ToInteger::new), @@ -1441,6 +1529,22 @@ static void writeReplace(PlanStreamOutput out, Replace replace) throws IOExcepti out.writeExpression(fields.get(2)); } + static ToLower readToLower(PlanStreamInput in) throws IOException { + return new ToLower(Source.EMPTY, in.readExpression(), in.configuration()); + } + + static void writeToLower(PlanStreamOutput out, ToLower toLower) throws IOException { + out.writeExpression(toLower.field()); + } + + static ToUpper readToUpper(PlanStreamInput in) throws IOException { + return new ToUpper(Source.EMPTY, in.readExpression(), in.configuration()); + } + + static void writeToUpper(PlanStreamOutput out, ToUpper toUpper) throws IOException { + out.writeExpression(toUpper.field()); + } + static Left readLeft(PlanStreamInput in) throws IOException { return new Left(in.readSource(), in.readExpression(), in.readExpression()); } @@ -1522,7 +1626,8 @@ static void writeArithmeticOperation(PlanStreamOutput out, ArithmeticOperation a entry(name(Min.class), Min::new), entry(name(Max.class), Max::new), entry(name(Median.class), Median::new), - entry(name(MedianAbsoluteDeviation.class), MedianAbsoluteDeviation::new) + entry(name(MedianAbsoluteDeviation.class), MedianAbsoluteDeviation::new), + entry(name(SpatialCentroid.class), SpatialCentroid::new) ); static AggregateFunction readAggFunction(PlanStreamInput in, String name) 
throws IOException { @@ -1642,7 +1747,7 @@ private static Object mapToLiteralValue(PlanStreamInput in, DataType dataType, O } private static BytesRef longAsWKB(DataType dataType, long encoded) { - return dataType == GEO_POINT ? GEO.longAsWKB(encoded) : CARTESIAN.longAsWKB(encoded); + return dataType == GEO_POINT ? GEO.longAsWkb(encoded) : CARTESIAN.longAsWkb(encoded); } private static long wkbAsLong(DataType dataType, BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java index 86ef7b6d1e618..f8cb1d20ba9c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -19,6 +20,7 @@ import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; @@ -43,6 +45,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.Set; import static java.util.Arrays.asList; @@ -129,24 +132,35 @@ private LogicalPlan missingToNull(LogicalPlan plan, SearchStats stats) { else if (plan instanceof Project project) { var projections = project.projections(); List newProjections = new ArrayList<>(projections.size()); - 
List literals = new ArrayList<>(); + Map nullLiteral = Maps.newLinkedHashMapWithExpectedSize(EsqlDataTypes.types().size()); for (NamedExpression projection : projections) { if (projection instanceof FieldAttribute f && stats.exists(f.qualifiedName()) == false) { - var alias = new Alias(f.source(), f.name(), null, Literal.of(f, null), f.id()); - literals.add(alias); - newProjections.add(alias.toAttribute()); - } else { - newProjections.add(projection); + DataType dt = f.dataType(); + Alias nullAlias = nullLiteral.get(f.dataType()); + // save the first field as null (per datatype) + if (nullAlias == null) { + Alias alias = new Alias(f.source(), f.name(), null, Literal.of(f, null), f.id()); + nullLiteral.put(dt, alias); + projection = alias.toAttribute(); + } + // otherwise point to it + else { + // since avoids creating field copies + projection = new Alias(f.source(), f.name(), f.qualifier(), nullAlias.toAttribute(), f.id()); + } } + + newProjections.add(projection); } - if (literals.size() > 0) { - plan = new Eval(project.source(), project.child(), literals); + // add the first found field as null + if (nullLiteral.size() > 0) { + plan = new Eval(project.source(), project.child(), new ArrayList<>(nullLiteral.values())); plan = new Project(project.source(), plan, newProjections); - } else { - plan = project; } - } else { + } + // otherwise transform fields in place + else { plan = plan.transformExpressionsOnlyUp( FieldAttribute.class, f -> stats.exists(f.qualifiedName()) ? f : Literal.of(f, null) @@ -216,11 +230,7 @@ protected LogicalPlan rule(Aggregate aggregate, LocalLogicalOptimizerContext con var aggs = aggregate.aggregates(); Set nonNullAggFields = Sets.newLinkedHashSetWithExpectedSize(aggs.size()); for (var agg : aggs) { - Expression expr = agg; - if (agg instanceof Alias as) { - expr = as.child(); - } - if (expr instanceof AggregateFunction af) { + if (Alias.unwrap(agg) instanceof AggregateFunction af) { Expression field = af.field(); // ignore literals (e.g. 
COUNT(1)) // make sure the field exists at the source and is indexed (not runtime) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index e59d80ed96b76..9073d3935852f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -11,6 +11,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveBinaryComparison; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; @@ -43,6 +44,7 @@ import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; import org.elasticsearch.xpack.ql.expression.TypedAttribute; +import org.elasticsearch.xpack.ql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.ql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.expression.predicate.logical.BinaryLogic; @@ -62,6 +64,7 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; @@ -113,7 +116,7 @@ protected List> rules(boolean optimizeForEsSource) { var pushdown = new Batch("Push to ES", esSourceRules.toArray(Rule[]::new)); // add the field extraction in just 
one pass // add it at the end after all the other rules have ran - var fieldExtraction = new Batch<>("Field extraction", Limiter.ONCE, new InsertFieldExtraction()); + var fieldExtraction = new Batch<>("Field extraction", Limiter.ONCE, new InsertFieldExtraction(), new SpatialDocValuesExtraction()); return asList(pushdown, fieldExtraction); } @@ -236,6 +239,8 @@ protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext public static boolean canPushToSource(Expression exp, Predicate hasIdenticalDelegate) { if (exp instanceof BinaryComparison bc) { return isAttributePushable(bc.left(), bc, hasIdenticalDelegate) && bc.right().foldable(); + } else if (exp instanceof InsensitiveBinaryComparison bc) { + return isAttributePushable(bc.left(), bc, hasIdenticalDelegate) && bc.right().foldable(); } else if (exp instanceof BinaryLogic bl) { return canPushToSource(bl.left(), hasIdenticalDelegate) && canPushToSource(bl.right(), hasIdenticalDelegate); } else if (exp instanceof In in) { @@ -427,4 +432,52 @@ public static boolean isPushableFieldAttribute(Expression exp, Predicate { + @Override + protected PhysicalPlan rule(AggregateExec aggregate) { + var foundAttributes = new HashSet(); + + PhysicalPlan plan = aggregate.transformDown(UnaryExec.class, exec -> { + if (exec instanceof AggregateExec agg) { + var orderedAggregates = new ArrayList(); + var changedAggregates = false; + for (NamedExpression aggExpr : agg.aggregates()) { + if (aggExpr instanceof Alias as && as.child() instanceof SpatialAggregateFunction af) { + if (af.field() instanceof FieldAttribute fieldAttribute) { + // We need to both mark the field to load differently, and change the spatial function to know to use it + foundAttributes.add(fieldAttribute); + changedAggregates = true; + orderedAggregates.add(as.replaceChild(af.withDocValues())); + } else { + orderedAggregates.add(aggExpr); + } + } else { + orderedAggregates.add(aggExpr); + } + } + if (changedAggregates) { + exec = new 
AggregateExec( + agg.source(), + agg.child(), + agg.groupings(), + orderedAggregates, + agg.getMode(), + agg.estimatedRowSize() + ); + } + } + if (exec instanceof FieldExtractExec fieldExtractExec) { + // Tell the field extractor that it should extract the field from doc-values instead of source values + for (Attribute found : foundAttributes) { + if (fieldExtractExec.attributesToExtract().contains(found)) { + fieldExtractExec = fieldExtractExec.preferDocValues(found); + } + } + exec = fieldExtractExec; + } + return exec; + }); + return plan; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index e4f67838731a0..81f712ae0408a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -153,6 +153,7 @@ protected static List> rules() { Limiter.ONCE, new SubstituteSurrogates(), new ReplaceRegexMatch(), + new ReplaceNestedExpressionWithEval(), new ReplaceAliasingEvalWithProject() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); @@ -183,14 +184,14 @@ protected LogicalPlan rule(Aggregate aggregate) { // first pass to check existing aggregates (to avoid duplication and alias waste) for (NamedExpression agg : aggs) { - if (agg instanceof Alias a && a.child() instanceof AggregateFunction af && af instanceof SurrogateExpression == false) { - aggFuncToAttr.put(af, a.toAttribute()); + if (Alias.unwrap(agg) instanceof AggregateFunction af && af instanceof SurrogateExpression == false) { + aggFuncToAttr.put(af, agg.toAttribute()); } } // 0. check list of surrogate expressions for (NamedExpression agg : aggs) { - Expression e = agg instanceof Alias a ? 
a.child() : agg; + Expression e = Alias.unwrap(agg); if (e instanceof SurrogateExpression sf) { changed = true; Expression s = sf.surrogate(); @@ -245,7 +246,7 @@ protected LogicalPlan rule(Aggregate aggregate) { return plan; } - private static String temporaryName(NamedExpression agg, AggregateFunction af) { + static String temporaryName(NamedExpression agg, AggregateFunction af) { return "__" + agg.name() + "_" + af.functionName() + "@" + Integer.toHexString(af.hashCode()); } } @@ -405,17 +406,25 @@ static class PropagateEvalFoldables extends Rule { @Override public LogicalPlan apply(LogicalPlan plan) { var collectRefs = new AttributeMap(); - // collect aliases + + java.util.function.Function replaceReference = r -> collectRefs.resolve(r, r); + + // collect aliases bottom-up plan.forEachExpressionUp(Alias.class, a -> { var c = a.child(); - if (c.foldable()) { - collectRefs.put(a.toAttribute(), c); + boolean shouldCollect = c.foldable(); + // try to resolve the expression based on an existing foldables + if (shouldCollect == false) { + c = c.transformUp(ReferenceAttribute.class, replaceReference); + shouldCollect = c.foldable(); + } + if (shouldCollect) { + collectRefs.put(a.toAttribute(), Literal.of(c)); } }); if (collectRefs.isEmpty()) { return plan; } - java.util.function.Function replaceReference = r -> collectRefs.resolve(r, r); plan = plan.transformUp(p -> { // Apply the replacement inside Filter and Eval (which shouldn't make a difference) @@ -660,7 +669,7 @@ private List aggsFromEmpty(List aggs) { int i = 0; for (var agg : aggs) { // there needs to be an alias - if (agg instanceof Alias a && a.child() instanceof AggregateFunction aggFunc) { + if (Alias.unwrap(agg) instanceof AggregateFunction aggFunc) { aggOutput(agg, aggFunc, blockFactory, blocks); } else { throw new EsqlIllegalArgumentException("Did not expect a non-aliased aggregation {}", agg); @@ -1056,6 +1065,111 @@ protected Expression regexToEquals(RegexMatch regexMatch, Literal literal) { } } + 
/** + * Replace nested expressions inside an aggregate with synthetic eval (which end up being projected away by the aggregate). + * stats sum(a + 1) by x % 2 + * becomes + * eval `a + 1` = a + 1, `x % 2` = x % 2 | stats sum(`a+1`_ref) by `x % 2`_ref + */ + static class ReplaceNestedExpressionWithEval extends OptimizerRules.OptimizerRule { + + @Override + protected LogicalPlan rule(Aggregate aggregate) { + List evals = new ArrayList<>(); + Map evalNames = new HashMap<>(); + List newGroupings = new ArrayList<>(aggregate.groupings()); + boolean groupingChanged = false; + + // start with the groupings since the aggs might duplicate it + for (int i = 0, s = newGroupings.size(); i < s; i++) { + Expression g = newGroupings.get(i); + // move the alias into an eval and replace it with its attribute + if (g instanceof Alias as) { + groupingChanged = true; + var attr = as.toAttribute(); + evals.add(as); + evalNames.put(as.name(), attr); + newGroupings.set(i, attr); + } + } + + Holder aggsChanged = new Holder<>(false); + List aggs = aggregate.aggregates(); + List newAggs = new ArrayList<>(aggs.size()); + + // map to track common expressions + Map expToAttribute = new HashMap<>(); + for (Alias a : evals) { + expToAttribute.put(a.child().canonical(), a.toAttribute()); + } + + // for the aggs make sure to unwrap the agg function and check the existing groupings + for (int i = 0, s = aggs.size(); i < s; i++) { + NamedExpression agg = aggs.get(i); + + NamedExpression a = (NamedExpression) agg.transformDown(Alias.class, as -> { + // if the child a nested expression + Expression child = as.child(); + + // shortcut for common scenario + if (child instanceof AggregateFunction af && af.field() instanceof Attribute) { + return as; + } + + // check if the alias matches any from grouping otherwise unwrap it + Attribute ref = evalNames.get(as.name()); + if (ref != null) { + aggsChanged.set(true); + return ref; + } + + // TODO: break expression into aggregate functions (sum(x + 1) / max(y + 
2)) + // List afs = a.collectFirstChildren(AggregateFunction.class::isInstance); + + // 1. look for the aggregate function + var replaced = child.transformUp(AggregateFunction.class, af -> { + Expression result = af; + + Expression field = af.field(); + // 2. if the field is a nested expression (not attribute or literal), replace it + if (field instanceof Attribute == false && field.foldable() == false) { + // 3. create a new alias if one doesn't exist yet no reference + Attribute attr = expToAttribute.computeIfAbsent(field.canonical(), k -> { + Alias newAlias = new Alias(k.source(), temporaryName(agg, af), null, k, null, true); + evals.add(newAlias); + aggsChanged.set(true); + return newAlias.toAttribute(); + }); + // replace field with attribute + List newChildren = new ArrayList<>(af.children()); + newChildren.set(0, attr); + result = af.replaceChildren(newChildren); + } + return result; + }); + + return as.replaceChild(replaced); + }); + + newAggs.add(a); + } + + if (evals.size() > 0) { + var groupings = groupingChanged ? newGroupings : aggregate.groupings(); + var aggregates = aggsChanged.get() ? newAggs : aggregate.aggregates(); + + var newEval = new Eval(aggregate.source(), aggregate.child(), evals); + aggregate = new Aggregate(aggregate.source(), newEval, groupings, aggregates); + } + + return aggregate; + } + + static String temporaryName(NamedExpression agg, AggregateFunction af) { + return SubstituteSurrogates.temporaryName(agg, af); + } + } + /** * Replace aliasing evals (eval x=a) with a projection which can be further combined / simplified. * The rule gets applied only if there's another project (Project/Stats) above it. 
@@ -1182,20 +1296,19 @@ public LogicalPlan apply(LogicalPlan plan) { private static LogicalPlan normalize(Aggregate aggregate, AttributeMap aliases) { var aggs = aggregate.aggregates(); List newAggs = new ArrayList<>(aggs.size()); - boolean changed = false; + final Holder changed = new Holder<>(false); for (NamedExpression agg : aggs) { - if (agg instanceof Alias as && as.child() instanceof AggregateFunction af) { + var newAgg = (NamedExpression) agg.transformDown(AggregateFunction.class, af -> { // replace field reference if (af.field() instanceof NamedExpression ne) { Attribute attr = ne.toAttribute(); var resolved = aliases.resolve(attr, attr); if (resolved != attr) { - changed = true; + changed.set(true); var newChildren = CollectionUtils.combine(Collections.singletonList(resolved), af.parameters()); // update the reference so Count can pick it up af = (AggregateFunction) af.replaceChildren(newChildren); - agg = as.replaceChild(af); } } // handle Count(*) @@ -1204,16 +1317,17 @@ private static LogicalPlan normalize(Aggregate aggregate, AttributeMap'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, - null, null, null, null, "'metadata'", null, null, null, null, null, null, - null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, - null, null, null, null, null, null, "'info'", "'functions'" + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", "'functions'", + null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -108,17 +113,18 @@ private static String[] makeSymbolicNames() { "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", 
"FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", - "TRUE", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", - "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", - "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS" + "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", + "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "UNQUOTED_ID_PATTERN", "PROJECT_LINE_COMMENT", + "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", + "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", + "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -181,44 +187,48 @@ public 
EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000b\u0442\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000i\u0489\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"+ - "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007"+ - "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007"+ - "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n"+ - "\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002"+ - "\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002"+ - "\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002"+ - "\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002"+ - "\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002"+ - "\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002"+ - "\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002"+ - " \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002"+ - "%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002"+ - "*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002"+ - "/\u0007/\u00020\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u0002"+ - "4\u00074\u00025\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u0002"+ - "9\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002"+ - ">\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002"+ - "C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002"+ - "H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002"+ - 
"M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002"+ - "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002"+ - "W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002"+ - "\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002"+ - "a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002"+ - "f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002"+ - "k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002"+ - "p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002"+ - "u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002"+ - "z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002"+ - "\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002"+ - "\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002"+ - "\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002"+ - "\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002"+ - "\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002"+ - "\u008e\u0007\u008e\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ + "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ + "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ + "\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ + "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ + "\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ + "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ + "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ + "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ + 
"\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ + "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ + "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ + "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ + "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ + "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ + "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ + "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ + "A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ + "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ + "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ + "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ + "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ + "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ + "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ + "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ + "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ + "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ + "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ + "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ + "}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ + "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ + "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ + "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ + "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ + "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ + 
"\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ + "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ + "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002"+ + "\u0099\u0007\u0099\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ @@ -241,31 +251,31 @@ public EsqlBaseLexer(CharStream input) { "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01c3\b\u0012"+ - "\u000b\u0012\f\u0012\u01c4\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0005\u0013\u01cd\b\u0013\n\u0013\f\u0013\u01d0"+ - "\t\u0013\u0001\u0013\u0003\u0013\u01d3\b\u0013\u0001\u0013\u0003\u0013"+ - "\u01d6\b\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0001\u0014\u0005\u0014\u01df\b\u0014\n\u0014\f\u0014\u01e2"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01da\b\u0012"+ + "\u000b\u0012\f\u0012\u01db\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0005\u0013\u01e4\b\u0013\n\u0013\f\u0013\u01e7"+ + "\t\u0013\u0001\u0013\u0003\u0013\u01ea\b\u0013\u0001\u0013\u0003\u0013"+ + "\u01ed\b\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0005\u0014\u01f6\b\u0014\n\u0014\f\u0014\u01f9"+ "\t\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0004\u0015\u01ea\b\u0015\u000b\u0015\f\u0015\u01eb\u0001\u0015"+ + 
"\u0015\u0004\u0015\u0201\b\u0015\u000b\u0015\f\u0015\u0202\u0001\u0015"+ "\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d"+ "\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ - "\u0001 \u0001 \u0003 \u0215\b \u0001 \u0004 \u0218\b \u000b \f \u0219"+ - "\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0223\b#\u0001"+ - "$\u0001$\u0001%\u0001%\u0001%\u0003%\u022a\b%\u0001&\u0001&\u0001&\u0005"+ - "&\u022f\b&\n&\f&\u0232\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005"+ - "&\u023a\b&\n&\f&\u023d\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0244"+ - "\b&\u0001&\u0003&\u0247\b&\u0003&\u0249\b&\u0001\'\u0004\'\u024c\b\'\u000b"+ - "\'\f\'\u024d\u0001(\u0004(\u0251\b(\u000b(\f(\u0252\u0001(\u0001(\u0005"+ - "(\u0257\b(\n(\f(\u025a\t(\u0001(\u0001(\u0004(\u025e\b(\u000b(\f(\u025f"+ - "\u0001(\u0004(\u0263\b(\u000b(\f(\u0264\u0001(\u0001(\u0005(\u0269\b("+ - "\n(\f(\u026c\t(\u0003(\u026e\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0274"+ - "\b(\u000b(\f(\u0275\u0001(\u0001(\u0003(\u027a\b(\u0001)\u0001)\u0001"+ + "\u0001 \u0001 \u0003 \u022c\b \u0001 \u0004 \u022f\b \u000b \f \u0230"+ + "\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u023a\b#\u0001"+ + "$\u0001$\u0001%\u0001%\u0001%\u0003%\u0241\b%\u0001&\u0001&\u0001&\u0005"+ + "&\u0246\b&\n&\f&\u0249\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005"+ + "&\u0251\b&\n&\f&\u0254\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u025b"+ + "\b&\u0001&\u0003&\u025e\b&\u0003&\u0260\b&\u0001\'\u0004\'\u0263\b\'\u000b"+ + "\'\f\'\u0264\u0001(\u0004(\u0268\b(\u000b(\f(\u0269\u0001(\u0001(\u0005"+ + 
"(\u026e\b(\n(\f(\u0271\t(\u0001(\u0001(\u0004(\u0275\b(\u000b(\f(\u0276"+ + "\u0001(\u0004(\u027a\b(\u000b(\f(\u027b\u0001(\u0001(\u0005(\u0280\b("+ + "\n(\f(\u0283\t(\u0003(\u0285\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u028b"+ + "\b(\u000b(\f(\u028c\u0001(\u0001(\u0003(\u0291\b(\u0001)\u0001)\u0001"+ ")\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001"+ ",\u0001-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001"+ "0\u00010\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u0001"+ @@ -275,589 +285,636 @@ public EsqlBaseLexer(CharStream input) { "9\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001"+ "<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001"+ ">\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001"+ - "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ - "E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ - "J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001"+ - "L\u0005L\u02f7\bL\nL\fL\u02fa\tL\u0001L\u0001L\u0003L\u02fe\bL\u0001L"+ - "\u0004L\u0301\bL\u000bL\fL\u0302\u0003L\u0305\bL\u0001M\u0001M\u0004M"+ - "\u0309\bM\u000bM\fM\u030a\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001"+ - "O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001"+ - "Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ - "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001"+ - "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001"+ - "V\u0001V\u0001W\u0001W\u0001W\u0003W\u0340\bW\u0001X\u0004X\u0343\bX\u000b"+ - "X\fX\u0344\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ - "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ - "]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001"+ - "_\u0001`\u0001`\u0001`\u0001`\u0003`\u0368\b`\u0001a\u0001a\u0003a\u036c"+ - 
"\ba\u0001a\u0005a\u036f\ba\na\fa\u0372\ta\u0001a\u0001a\u0003a\u0376\b"+ - "a\u0001a\u0004a\u0379\ba\u000ba\fa\u037a\u0003a\u037d\ba\u0001b\u0001"+ - "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ - "d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001f\u0001"+ - "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ - "i\u0001i\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ - "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ - "n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001"+ - "q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001"+ - "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ - "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ - "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ - "|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ - "~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ - "\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081\u0001"+ - "\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ - "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ - "\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ - "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001"+ 
- "\u008e\u0001\u008e\u0002\u01e0\u023b\u0000\u008f\n\u0001\f\u0002\u000e"+ - "\u0003\u0010\u0004\u0012\u0005\u0014\u0006\u0016\u0007\u0018\b\u001a\t"+ - "\u001c\n\u001e\u000b \f\"\r$\u000e&\u000f(\u0010*\u0011,\u0012.\u0013"+ - "0\u00142\u00154\u00166\u00008\u0000:\u0017<\u0018>\u0019@\u001aB\u0000"+ - "D\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u001b"+ - "X\u001cZ\u001d\\\u001e^\u001f` b!d\"f#h$j%l&n\'p(r)t*v+x,z-|.~/\u0080"+ - "0\u00821\u00842\u00863\u00884\u008a5\u008c6\u008e7\u00908\u00929\u0094"+ - ":\u0096;\u0098<\u009a=\u009c>\u009e?\u00a0@\u00a2A\u00a4B\u00a6C\u00a8"+ - "D\u00aaE\u00ac\u0000\u00ae\u0000\u00b0\u0000\u00b2\u0000\u00b4\u0000\u00b6"+ - "F\u00b8\u0000\u00baG\u00bc\u0000\u00beH\u00c0I\u00c2J\u00c4\u0000\u00c6"+ - "\u0000\u00c8\u0000\u00ca\u0000\u00ccK\u00ce\u0000\u00d0L\u00d2M\u00d4"+ - "N\u00d6\u0000\u00d8\u0000\u00da\u0000\u00dc\u0000\u00deO\u00e0\u0000\u00e2"+ - "\u0000\u00e4P\u00e6Q\u00e8R\u00ea\u0000\u00ecS\u00eeT\u00f0\u0000\u00f2"+ - "\u0000\u00f4U\u00f6V\u00f8W\u00fa\u0000\u00fc\u0000\u00fe\u0000\u0100"+ - "\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108X\u010aY\u010cZ\u010e"+ - "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116[\u0118\\\u011a]\u011c"+ - "\u0000\u011e^\u0120_\u0122`\u0124a\u0126b\n\u0000\u0001\u0002\u0003\u0004"+ - "\u0005\u0006\u0007\b\t\f\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r"+ - "\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\""+ - "\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++-"+ - "-\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u0458"+ - "\u0000\n\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000\u0000"+ - "\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000"+ - "\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000"+ - "\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000"+ - "\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000"+ - 
"\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\""+ - "\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000"+ - "\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000"+ - "\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000"+ - "\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000"+ - "\u0000\u0000\u00016\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000\u0000"+ - "\u0001:\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001>"+ - "\u0001\u0000\u0000\u0000\u0002@\u0001\u0000\u0000\u0000\u0002V\u0001\u0000"+ - "\u0000\u0000\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000\u0000"+ - "\u0002\\\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000\u0002"+ - "`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001"+ - "\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ - "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ - "n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ - "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ - "\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ - "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ - "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ - "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ - "\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ - "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ - "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ - "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ - "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ - "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ - 
"\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ - "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ - "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0003\u00ac\u0001"+ - "\u0000\u0000\u0000\u0003\u00ae\u0001\u0000\u0000\u0000\u0003\u00b0\u0001"+ - "\u0000\u0000\u0000\u0003\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4\u0001"+ - "\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00ba\u0001"+ - "\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001"+ - "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ - "\u0000\u0000\u0000\u0004\u00c4\u0001\u0000\u0000\u0000\u0004\u00c6\u0001"+ - "\u0000\u0000\u0000\u0004\u00c8\u0001\u0000\u0000\u0000\u0004\u00cc\u0001"+ - "\u0000\u0000\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0\u0001"+ - "\u0000\u0000\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d4\u0001"+ - "\u0000\u0000\u0000\u0005\u00d6\u0001\u0000\u0000\u0000\u0005\u00d8\u0001"+ - "\u0000\u0000\u0000\u0005\u00da\u0001\u0000\u0000\u0000\u0005\u00dc\u0001"+ - "\u0000\u0000\u0000\u0005\u00de\u0001\u0000\u0000\u0000\u0005\u00e0\u0001"+ - "\u0000\u0000\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4\u0001"+ - "\u0000\u0000\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8\u0001"+ - "\u0000\u0000\u0000\u0006\u00ea\u0001\u0000\u0000\u0000\u0006\u00ec\u0001"+ - "\u0000\u0000\u0000\u0006\u00ee\u0001\u0000\u0000\u0000\u0006\u00f0\u0001"+ - "\u0000\u0000\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4\u0001"+ - "\u0000\u0000\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8\u0001"+ - "\u0000\u0000\u0000\u0007\u00fa\u0001\u0000\u0000\u0000\u0007\u00fc\u0001"+ - "\u0000\u0000\u0000\u0007\u00fe\u0001\u0000\u0000\u0000\u0007\u0100\u0001"+ - "\u0000\u0000\u0000\u0007\u0102\u0001\u0000\u0000\u0000\u0007\u0104\u0001"+ - "\u0000\u0000\u0000\u0007\u0106\u0001\u0000\u0000\u0000\u0007\u0108\u0001"+ - 
"\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c\u0001"+ - "\u0000\u0000\u0000\b\u010e\u0001\u0000\u0000\u0000\b\u0110\u0001\u0000"+ - "\u0000\u0000\b\u0112\u0001\u0000\u0000\u0000\b\u0114\u0001\u0000\u0000"+ - "\u0000\b\u0116\u0001\u0000\u0000\u0000\b\u0118\u0001\u0000\u0000\u0000"+ - "\b\u011a\u0001\u0000\u0000\u0000\t\u011c\u0001\u0000\u0000\u0000\t\u011e"+ - "\u0001\u0000\u0000\u0000\t\u0120\u0001\u0000\u0000\u0000\t\u0122\u0001"+ - "\u0000\u0000\u0000\t\u0124\u0001\u0000\u0000\u0000\t\u0126\u0001\u0000"+ - "\u0000\u0000\n\u0128\u0001\u0000\u0000\u0000\f\u0132\u0001\u0000\u0000"+ - "\u0000\u000e\u0139\u0001\u0000\u0000\u0000\u0010\u0142\u0001\u0000\u0000"+ - "\u0000\u0012\u0149\u0001\u0000\u0000\u0000\u0014\u0153\u0001\u0000\u0000"+ - "\u0000\u0016\u015a\u0001\u0000\u0000\u0000\u0018\u0161\u0001\u0000\u0000"+ - "\u0000\u001a\u016f\u0001\u0000\u0000\u0000\u001c\u0176\u0001\u0000\u0000"+ - "\u0000\u001e\u017e\u0001\u0000\u0000\u0000 \u018a\u0001\u0000\u0000\u0000"+ - "\"\u0194\u0001\u0000\u0000\u0000$\u019d\u0001\u0000\u0000\u0000&\u01a3"+ - "\u0001\u0000\u0000\u0000(\u01aa\u0001\u0000\u0000\u0000*\u01b1\u0001\u0000"+ - "\u0000\u0000,\u01b9\u0001\u0000\u0000\u0000.\u01c2\u0001\u0000\u0000\u0000"+ - "0\u01c8\u0001\u0000\u0000\u00002\u01d9\u0001\u0000\u0000\u00004\u01e9"+ - "\u0001\u0000\u0000\u00006\u01ef\u0001\u0000\u0000\u00008\u01f4\u0001\u0000"+ - "\u0000\u0000:\u01f9\u0001\u0000\u0000\u0000<\u01fd\u0001\u0000\u0000\u0000"+ - ">\u0201\u0001\u0000\u0000\u0000@\u0205\u0001\u0000\u0000\u0000B\u0209"+ - "\u0001\u0000\u0000\u0000D\u020b\u0001\u0000\u0000\u0000F\u020d\u0001\u0000"+ - "\u0000\u0000H\u0210\u0001\u0000\u0000\u0000J\u0212\u0001\u0000\u0000\u0000"+ - "L\u021b\u0001\u0000\u0000\u0000N\u021d\u0001\u0000\u0000\u0000P\u0222"+ - "\u0001\u0000\u0000\u0000R\u0224\u0001\u0000\u0000\u0000T\u0229\u0001\u0000"+ - "\u0000\u0000V\u0248\u0001\u0000\u0000\u0000X\u024b\u0001\u0000\u0000\u0000"+ - 
"Z\u0279\u0001\u0000\u0000\u0000\\\u027b\u0001\u0000\u0000\u0000^\u027e"+ - "\u0001\u0000\u0000\u0000`\u0282\u0001\u0000\u0000\u0000b\u0286\u0001\u0000"+ - "\u0000\u0000d\u0288\u0001\u0000\u0000\u0000f\u028a\u0001\u0000\u0000\u0000"+ - "h\u028f\u0001\u0000\u0000\u0000j\u0291\u0001\u0000\u0000\u0000l\u0297"+ - "\u0001\u0000\u0000\u0000n\u029d\u0001\u0000\u0000\u0000p\u02a2\u0001\u0000"+ - "\u0000\u0000r\u02a4\u0001\u0000\u0000\u0000t\u02a7\u0001\u0000\u0000\u0000"+ - "v\u02aa\u0001\u0000\u0000\u0000x\u02af\u0001\u0000\u0000\u0000z\u02b3"+ - "\u0001\u0000\u0000\u0000|\u02b8\u0001\u0000\u0000\u0000~\u02be\u0001\u0000"+ - "\u0000\u0000\u0080\u02c1\u0001\u0000\u0000\u0000\u0082\u02c3\u0001\u0000"+ - "\u0000\u0000\u0084\u02c9\u0001\u0000\u0000\u0000\u0086\u02cb\u0001\u0000"+ - "\u0000\u0000\u0088\u02d0\u0001\u0000\u0000\u0000\u008a\u02d3\u0001\u0000"+ - "\u0000\u0000\u008c\u02d6\u0001\u0000\u0000\u0000\u008e\u02d8\u0001\u0000"+ - "\u0000\u0000\u0090\u02db\u0001\u0000\u0000\u0000\u0092\u02dd\u0001\u0000"+ - "\u0000\u0000\u0094\u02e0\u0001\u0000\u0000\u0000\u0096\u02e2\u0001\u0000"+ - "\u0000\u0000\u0098\u02e4\u0001\u0000\u0000\u0000\u009a\u02e6\u0001\u0000"+ - "\u0000\u0000\u009c\u02e8\u0001\u0000\u0000\u0000\u009e\u02ea\u0001\u0000"+ - "\u0000\u0000\u00a0\u02ef\u0001\u0000\u0000\u0000\u00a2\u0304\u0001\u0000"+ - "\u0000\u0000\u00a4\u0306\u0001\u0000\u0000\u0000\u00a6\u030e\u0001\u0000"+ - "\u0000\u0000\u00a8\u0312\u0001\u0000\u0000\u0000\u00aa\u0316\u0001\u0000"+ - "\u0000\u0000\u00ac\u031a\u0001\u0000\u0000\u0000\u00ae\u031f\u0001\u0000"+ - "\u0000\u0000\u00b0\u0325\u0001\u0000\u0000\u0000\u00b2\u032b\u0001\u0000"+ - "\u0000\u0000\u00b4\u032f\u0001\u0000\u0000\u0000\u00b6\u0333\u0001\u0000"+ - "\u0000\u0000\u00b8\u033f\u0001\u0000\u0000\u0000\u00ba\u0342\u0001\u0000"+ - "\u0000\u0000\u00bc\u0346\u0001\u0000\u0000\u0000\u00be\u034a\u0001\u0000"+ - "\u0000\u0000\u00c0\u034e\u0001\u0000\u0000\u0000\u00c2\u0352\u0001\u0000"+ - 
"\u0000\u0000\u00c4\u0356\u0001\u0000\u0000\u0000\u00c6\u035b\u0001\u0000"+ - "\u0000\u0000\u00c8\u035f\u0001\u0000\u0000\u0000\u00ca\u0367\u0001\u0000"+ - "\u0000\u0000\u00cc\u037c\u0001\u0000\u0000\u0000\u00ce\u037e\u0001\u0000"+ - "\u0000\u0000\u00d0\u0382\u0001\u0000\u0000\u0000\u00d2\u0386\u0001\u0000"+ - "\u0000\u0000\u00d4\u038a\u0001\u0000\u0000\u0000\u00d6\u038e\u0001\u0000"+ - "\u0000\u0000\u00d8\u0393\u0001\u0000\u0000\u0000\u00da\u0397\u0001\u0000"+ - "\u0000\u0000\u00dc\u039b\u0001\u0000\u0000\u0000\u00de\u039f\u0001\u0000"+ - "\u0000\u0000\u00e0\u03a2\u0001\u0000\u0000\u0000\u00e2\u03a6\u0001\u0000"+ - "\u0000\u0000\u00e4\u03aa\u0001\u0000\u0000\u0000\u00e6\u03ae\u0001\u0000"+ - "\u0000\u0000\u00e8\u03b2\u0001\u0000\u0000\u0000\u00ea\u03b6\u0001\u0000"+ - "\u0000\u0000\u00ec\u03bb\u0001\u0000\u0000\u0000\u00ee\u03c0\u0001\u0000"+ - "\u0000\u0000\u00f0\u03c7\u0001\u0000\u0000\u0000\u00f2\u03cb\u0001\u0000"+ - "\u0000\u0000\u00f4\u03cf\u0001\u0000\u0000\u0000\u00f6\u03d3\u0001\u0000"+ - "\u0000\u0000\u00f8\u03d7\u0001\u0000\u0000\u0000\u00fa\u03db\u0001\u0000"+ - "\u0000\u0000\u00fc\u03e1\u0001\u0000\u0000\u0000\u00fe\u03e5\u0001\u0000"+ - "\u0000\u0000\u0100\u03e9\u0001\u0000\u0000\u0000\u0102\u03ed\u0001\u0000"+ - "\u0000\u0000\u0104\u03f1\u0001\u0000\u0000\u0000\u0106\u03f5\u0001\u0000"+ - "\u0000\u0000\u0108\u03f9\u0001\u0000\u0000\u0000\u010a\u03fd\u0001\u0000"+ - "\u0000\u0000\u010c\u0401\u0001\u0000\u0000\u0000\u010e\u0405\u0001\u0000"+ - "\u0000\u0000\u0110\u040a\u0001\u0000\u0000\u0000\u0112\u040e\u0001\u0000"+ - "\u0000\u0000\u0114\u0412\u0001\u0000\u0000\u0000\u0116\u0416\u0001\u0000"+ - "\u0000\u0000\u0118\u041a\u0001\u0000\u0000\u0000\u011a\u041e\u0001\u0000"+ - "\u0000\u0000\u011c\u0422\u0001\u0000\u0000\u0000\u011e\u0427\u0001\u0000"+ - "\u0000\u0000\u0120\u042c\u0001\u0000\u0000\u0000\u0122\u0436\u0001\u0000"+ - "\u0000\u0000\u0124\u043a\u0001\u0000\u0000\u0000\u0126\u043e\u0001\u0000"+ - 
"\u0000\u0000\u0128\u0129\u0005d\u0000\u0000\u0129\u012a\u0005i\u0000\u0000"+ - "\u012a\u012b\u0005s\u0000\u0000\u012b\u012c\u0005s\u0000\u0000\u012c\u012d"+ - "\u0005e\u0000\u0000\u012d\u012e\u0005c\u0000\u0000\u012e\u012f\u0005t"+ - "\u0000\u0000\u012f\u0130\u0001\u0000\u0000\u0000\u0130\u0131\u0006\u0000"+ - "\u0000\u0000\u0131\u000b\u0001\u0000\u0000\u0000\u0132\u0133\u0005d\u0000"+ - "\u0000\u0133\u0134\u0005r\u0000\u0000\u0134\u0135\u0005o\u0000\u0000\u0135"+ - "\u0136\u0005p\u0000\u0000\u0136\u0137\u0001\u0000\u0000\u0000\u0137\u0138"+ - "\u0006\u0001\u0001\u0000\u0138\r\u0001\u0000\u0000\u0000\u0139\u013a\u0005"+ - "e\u0000\u0000\u013a\u013b\u0005n\u0000\u0000\u013b\u013c\u0005r\u0000"+ - "\u0000\u013c\u013d\u0005i\u0000\u0000\u013d\u013e\u0005c\u0000\u0000\u013e"+ - "\u013f\u0005h\u0000\u0000\u013f\u0140\u0001\u0000\u0000\u0000\u0140\u0141"+ - "\u0006\u0002\u0002\u0000\u0141\u000f\u0001\u0000\u0000\u0000\u0142\u0143"+ - "\u0005e\u0000\u0000\u0143\u0144\u0005v\u0000\u0000\u0144\u0145\u0005a"+ - "\u0000\u0000\u0145\u0146\u0005l\u0000\u0000\u0146\u0147\u0001\u0000\u0000"+ - "\u0000\u0147\u0148\u0006\u0003\u0000\u0000\u0148\u0011\u0001\u0000\u0000"+ - "\u0000\u0149\u014a\u0005e\u0000\u0000\u014a\u014b\u0005x\u0000\u0000\u014b"+ - "\u014c\u0005p\u0000\u0000\u014c\u014d\u0005l\u0000\u0000\u014d\u014e\u0005"+ - "a\u0000\u0000\u014e\u014f\u0005i\u0000\u0000\u014f\u0150\u0005n\u0000"+ - "\u0000\u0150\u0151\u0001\u0000\u0000\u0000\u0151\u0152\u0006\u0004\u0003"+ - "\u0000\u0152\u0013\u0001\u0000\u0000\u0000\u0153\u0154\u0005f\u0000\u0000"+ - "\u0154\u0155\u0005r\u0000\u0000\u0155\u0156\u0005o\u0000\u0000\u0156\u0157"+ - "\u0005m\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u0158\u0159\u0006"+ - "\u0005\u0004\u0000\u0159\u0015\u0001\u0000\u0000\u0000\u015a\u015b\u0005"+ - "g\u0000\u0000\u015b\u015c\u0005r\u0000\u0000\u015c\u015d\u0005o\u0000"+ - "\u0000\u015d\u015e\u0005k\u0000\u0000\u015e\u015f\u0001\u0000\u0000\u0000"+ - 
"\u015f\u0160\u0006\u0006\u0000\u0000\u0160\u0017\u0001\u0000\u0000\u0000"+ - "\u0161\u0162\u0005i\u0000\u0000\u0162\u0163\u0005n\u0000\u0000\u0163\u0164"+ - "\u0005l\u0000\u0000\u0164\u0165\u0005i\u0000\u0000\u0165\u0166\u0005n"+ - "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005s\u0000\u0000"+ - "\u0168\u0169\u0005t\u0000\u0000\u0169\u016a\u0005a\u0000\u0000\u016a\u016b"+ - "\u0005t\u0000\u0000\u016b\u016c\u0005s\u0000\u0000\u016c\u016d\u0001\u0000"+ - "\u0000\u0000\u016d\u016e\u0006\u0007\u0000\u0000\u016e\u0019\u0001\u0000"+ - "\u0000\u0000\u016f\u0170\u0005k\u0000\u0000\u0170\u0171\u0005e\u0000\u0000"+ - "\u0171\u0172\u0005e\u0000\u0000\u0172\u0173\u0005p\u0000\u0000\u0173\u0174"+ - "\u0001\u0000\u0000\u0000\u0174\u0175\u0006\b\u0001\u0000\u0175\u001b\u0001"+ - "\u0000\u0000\u0000\u0176\u0177\u0005l\u0000\u0000\u0177\u0178\u0005i\u0000"+ - "\u0000\u0178\u0179\u0005m\u0000\u0000\u0179\u017a\u0005i\u0000\u0000\u017a"+ - "\u017b\u0005t\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c\u017d"+ - "\u0006\t\u0000\u0000\u017d\u001d\u0001\u0000\u0000\u0000\u017e\u017f\u0005"+ - "m\u0000\u0000\u017f\u0180\u0005v\u0000\u0000\u0180\u0181\u0005_\u0000"+ - "\u0000\u0181\u0182\u0005e\u0000\u0000\u0182\u0183\u0005x\u0000\u0000\u0183"+ - "\u0184\u0005p\u0000\u0000\u0184\u0185\u0005a\u0000\u0000\u0185\u0186\u0005"+ - "n\u0000\u0000\u0186\u0187\u0005d\u0000\u0000\u0187\u0188\u0001\u0000\u0000"+ - "\u0000\u0188\u0189\u0006\n\u0005\u0000\u0189\u001f\u0001\u0000\u0000\u0000"+ - "\u018a\u018b\u0005p\u0000\u0000\u018b\u018c\u0005r\u0000\u0000\u018c\u018d"+ - "\u0005o\u0000\u0000\u018d\u018e\u0005j\u0000\u0000\u018e\u018f\u0005e"+ - "\u0000\u0000\u018f\u0190\u0005c\u0000\u0000\u0190\u0191\u0005t\u0000\u0000"+ - "\u0191\u0192\u0001\u0000\u0000\u0000\u0192\u0193\u0006\u000b\u0001\u0000"+ - "\u0193!\u0001\u0000\u0000\u0000\u0194\u0195\u0005r\u0000\u0000\u0195\u0196"+ - "\u0005e\u0000\u0000\u0196\u0197\u0005n\u0000\u0000\u0197\u0198\u0005a"+ - 
"\u0000\u0000\u0198\u0199\u0005m\u0000\u0000\u0199\u019a\u0005e\u0000\u0000"+ - "\u019a\u019b\u0001\u0000\u0000\u0000\u019b\u019c\u0006\f\u0006\u0000\u019c"+ - "#\u0001\u0000\u0000\u0000\u019d\u019e\u0005r\u0000\u0000\u019e\u019f\u0005"+ - "o\u0000\u0000\u019f\u01a0\u0005w\u0000\u0000\u01a0\u01a1\u0001\u0000\u0000"+ - "\u0000\u01a1\u01a2\u0006\r\u0000\u0000\u01a2%\u0001\u0000\u0000\u0000"+ - "\u01a3\u01a4\u0005s\u0000\u0000\u01a4\u01a5\u0005h\u0000\u0000\u01a5\u01a6"+ - "\u0005o\u0000\u0000\u01a6\u01a7\u0005w\u0000\u0000\u01a7\u01a8\u0001\u0000"+ - "\u0000\u0000\u01a8\u01a9\u0006\u000e\u0007\u0000\u01a9\'\u0001\u0000\u0000"+ - "\u0000\u01aa\u01ab\u0005s\u0000\u0000\u01ab\u01ac\u0005o\u0000\u0000\u01ac"+ - "\u01ad\u0005r\u0000\u0000\u01ad\u01ae\u0005t\u0000\u0000\u01ae\u01af\u0001"+ - "\u0000\u0000\u0000\u01af\u01b0\u0006\u000f\u0000\u0000\u01b0)\u0001\u0000"+ - "\u0000\u0000\u01b1\u01b2\u0005s\u0000\u0000\u01b2\u01b3\u0005t\u0000\u0000"+ - "\u01b3\u01b4\u0005a\u0000\u0000\u01b4\u01b5\u0005t\u0000\u0000\u01b5\u01b6"+ - "\u0005s\u0000\u0000\u01b6\u01b7\u0001\u0000\u0000\u0000\u01b7\u01b8\u0006"+ - "\u0010\u0000\u0000\u01b8+\u0001\u0000\u0000\u0000\u01b9\u01ba\u0005w\u0000"+ - "\u0000\u01ba\u01bb\u0005h\u0000\u0000\u01bb\u01bc\u0005e\u0000\u0000\u01bc"+ - "\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005e\u0000\u0000\u01be\u01bf\u0001"+ - "\u0000\u0000\u0000\u01bf\u01c0\u0006\u0011\u0000\u0000\u01c0-\u0001\u0000"+ - "\u0000\u0000\u01c1\u01c3\b\u0000\u0000\u0000\u01c2\u01c1\u0001\u0000\u0000"+ - "\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c4\u01c5\u0001\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000"+ - "\u0000\u01c6\u01c7\u0006\u0012\u0000\u0000\u01c7/\u0001\u0000\u0000\u0000"+ - "\u01c8\u01c9\u0005/\u0000\u0000\u01c9\u01ca\u0005/\u0000\u0000\u01ca\u01ce"+ - "\u0001\u0000\u0000\u0000\u01cb\u01cd\b\u0001\u0000\u0000\u01cc\u01cb\u0001"+ - "\u0000\u0000\u0000\u01cd\u01d0\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001"+ - 
"\u0000\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d2\u0001"+ - "\u0000\u0000\u0000\u01d0\u01ce\u0001\u0000\u0000\u0000\u01d1\u01d3\u0005"+ - "\r\u0000\u0000\u01d2\u01d1\u0001\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000"+ - "\u0000\u0000\u01d3\u01d5\u0001\u0000\u0000\u0000\u01d4\u01d6\u0005\n\u0000"+ - "\u0000\u01d5\u01d4\u0001\u0000\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000"+ - "\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7\u01d8\u0006\u0013\b\u0000"+ - "\u01d81\u0001\u0000\u0000\u0000\u01d9\u01da\u0005/\u0000\u0000\u01da\u01db"+ - "\u0005*\u0000\u0000\u01db\u01e0\u0001\u0000\u0000\u0000\u01dc\u01df\u0003"+ - "2\u0014\u0000\u01dd\u01df\t\u0000\u0000\u0000\u01de\u01dc\u0001\u0000"+ - "\u0000\u0000\u01de\u01dd\u0001\u0000\u0000\u0000\u01df\u01e2\u0001\u0000"+ - "\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000"+ - "\u0000\u0000\u01e1\u01e3\u0001\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000"+ - "\u0000\u0000\u01e3\u01e4\u0005*\u0000\u0000\u01e4\u01e5\u0005/\u0000\u0000"+ - "\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0006\u0014\b\u0000\u01e7"+ - "3\u0001\u0000\u0000\u0000\u01e8\u01ea\u0007\u0002\u0000\u0000\u01e9\u01e8"+ - "\u0001\u0000\u0000\u0000\u01ea\u01eb\u0001\u0000\u0000\u0000\u01eb\u01e9"+ - "\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ec\u01ed"+ - "\u0001\u0000\u0000\u0000\u01ed\u01ee\u0006\u0015\b\u0000\u01ee5\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0003\u009eJ\u0000\u01f0\u01f1\u0001\u0000"+ - "\u0000\u0000\u01f1\u01f2\u0006\u0016\t\u0000\u01f2\u01f3\u0006\u0016\n"+ - "\u0000\u01f37\u0001\u0000\u0000\u0000\u01f4\u01f5\u0003@\u001b\u0000\u01f5"+ - "\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0017\u000b\u0000\u01f7"+ - "\u01f8\u0006\u0017\f\u0000\u01f89\u0001\u0000\u0000\u0000\u01f9\u01fa"+ - "\u00034\u0015\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fb\u01fc\u0006"+ - "\u0018\b\u0000\u01fc;\u0001\u0000\u0000\u0000\u01fd\u01fe\u00030\u0013"+ - 
"\u0000\u01fe\u01ff\u0001\u0000\u0000\u0000\u01ff\u0200\u0006\u0019\b\u0000"+ - "\u0200=\u0001\u0000\u0000\u0000\u0201\u0202\u00032\u0014\u0000\u0202\u0203"+ - "\u0001\u0000\u0000\u0000\u0203\u0204\u0006\u001a\b\u0000\u0204?\u0001"+ - "\u0000\u0000\u0000\u0205\u0206\u0005|\u0000\u0000\u0206\u0207\u0001\u0000"+ - "\u0000\u0000\u0207\u0208\u0006\u001b\f\u0000\u0208A\u0001\u0000\u0000"+ - "\u0000\u0209\u020a\u0007\u0003\u0000\u0000\u020aC\u0001\u0000\u0000\u0000"+ - "\u020b\u020c\u0007\u0004\u0000\u0000\u020cE\u0001\u0000\u0000\u0000\u020d"+ - "\u020e\u0005\\\u0000\u0000\u020e\u020f\u0007\u0005\u0000\u0000\u020fG"+ - "\u0001\u0000\u0000\u0000\u0210\u0211\b\u0006\u0000\u0000\u0211I\u0001"+ - "\u0000\u0000\u0000\u0212\u0214\u0007\u0007\u0000\u0000\u0213\u0215\u0007"+ - "\b\u0000\u0000\u0214\u0213\u0001\u0000\u0000\u0000\u0214\u0215\u0001\u0000"+ - "\u0000\u0000\u0215\u0217\u0001\u0000\u0000\u0000\u0216\u0218\u0003B\u001c"+ - "\u0000\u0217\u0216\u0001\u0000\u0000\u0000\u0218\u0219\u0001\u0000\u0000"+ - "\u0000\u0219\u0217\u0001\u0000\u0000\u0000\u0219\u021a\u0001\u0000\u0000"+ - "\u0000\u021aK\u0001\u0000\u0000\u0000\u021b\u021c\u0005@\u0000\u0000\u021c"+ - "M\u0001\u0000\u0000\u0000\u021d\u021e\u0005`\u0000\u0000\u021eO\u0001"+ - "\u0000\u0000\u0000\u021f\u0223\b\t\u0000\u0000\u0220\u0221\u0005`\u0000"+ - "\u0000\u0221\u0223\u0005`\u0000\u0000\u0222\u021f\u0001\u0000\u0000\u0000"+ - "\u0222\u0220\u0001\u0000\u0000\u0000\u0223Q\u0001\u0000\u0000\u0000\u0224"+ - "\u0225\u0005_\u0000\u0000\u0225S\u0001\u0000\u0000\u0000\u0226\u022a\u0003"+ - "D\u001d\u0000\u0227\u022a\u0003B\u001c\u0000\u0228\u022a\u0003R$\u0000"+ - "\u0229\u0226\u0001\u0000\u0000\u0000\u0229\u0227\u0001\u0000\u0000\u0000"+ - "\u0229\u0228\u0001\u0000\u0000\u0000\u022aU\u0001\u0000\u0000\u0000\u022b"+ - "\u0230\u0005\"\u0000\u0000\u022c\u022f\u0003F\u001e\u0000\u022d\u022f"+ - "\u0003H\u001f\u0000\u022e\u022c\u0001\u0000\u0000\u0000\u022e\u022d\u0001"+ - 
"\u0000\u0000\u0000\u022f\u0232\u0001\u0000\u0000\u0000\u0230\u022e\u0001"+ - "\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231\u0233\u0001"+ - "\u0000\u0000\u0000\u0232\u0230\u0001\u0000\u0000\u0000\u0233\u0249\u0005"+ - "\"\u0000\u0000\u0234\u0235\u0005\"\u0000\u0000\u0235\u0236\u0005\"\u0000"+ - "\u0000\u0236\u0237\u0005\"\u0000\u0000\u0237\u023b\u0001\u0000\u0000\u0000"+ - "\u0238\u023a\b\u0001\u0000\u0000\u0239\u0238\u0001\u0000\u0000\u0000\u023a"+ - "\u023d\u0001\u0000\u0000\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023b"+ - "\u0239\u0001\u0000\u0000\u0000\u023c\u023e\u0001\u0000\u0000\u0000\u023d"+ - "\u023b\u0001\u0000\u0000\u0000\u023e\u023f\u0005\"\u0000\u0000\u023f\u0240"+ - "\u0005\"\u0000\u0000\u0240\u0241\u0005\"\u0000\u0000\u0241\u0243\u0001"+ - "\u0000\u0000\u0000\u0242\u0244\u0005\"\u0000\u0000\u0243\u0242\u0001\u0000"+ - "\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0246\u0001\u0000"+ - "\u0000\u0000\u0245\u0247\u0005\"\u0000\u0000\u0246\u0245\u0001\u0000\u0000"+ - "\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247\u0249\u0001\u0000\u0000"+ - "\u0000\u0248\u022b\u0001\u0000\u0000\u0000\u0248\u0234\u0001\u0000\u0000"+ - "\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024c\u0003B\u001c\u0000\u024b"+ - "\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000\u024d"+ - "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ - "Y\u0001\u0000\u0000\u0000\u024f\u0251\u0003B\u001c\u0000\u0250\u024f\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0250\u0001"+ - "\u0000\u0000\u0000\u0252\u0253\u0001\u0000\u0000\u0000\u0253\u0254\u0001"+ - "\u0000\u0000\u0000\u0254\u0258\u0003h/\u0000\u0255\u0257\u0003B\u001c"+ - "\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0257\u025a\u0001\u0000\u0000"+ - "\u0000\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000"+ - "\u0000\u0259\u027a\u0001\u0000\u0000\u0000\u025a\u0258\u0001\u0000\u0000"+ - 
"\u0000\u025b\u025d\u0003h/\u0000\u025c\u025e\u0003B\u001c\u0000\u025d"+ - "\u025c\u0001\u0000\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f"+ - "\u025d\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260"+ - "\u027a\u0001\u0000\u0000\u0000\u0261\u0263\u0003B\u001c\u0000\u0262\u0261"+ - "\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0262"+ - "\u0001\u0000\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u026d"+ - "\u0001\u0000\u0000\u0000\u0266\u026a\u0003h/\u0000\u0267\u0269\u0003B"+ - "\u001c\u0000\u0268\u0267\u0001\u0000\u0000\u0000\u0269\u026c\u0001\u0000"+ - "\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000"+ - "\u0000\u0000\u026b\u026e\u0001\u0000\u0000\u0000\u026c\u026a\u0001\u0000"+ - "\u0000\u0000\u026d\u0266\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000"+ - "\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f\u0270\u0003J \u0000"+ - "\u0270\u027a\u0001\u0000\u0000\u0000\u0271\u0273\u0003h/\u0000\u0272\u0274"+ - "\u0003B\u001c\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - "\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001"+ - "\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0278\u0003"+ - "J \u0000\u0278\u027a\u0001\u0000\u0000\u0000\u0279\u0250\u0001\u0000\u0000"+ - "\u0000\u0279\u025b\u0001\u0000\u0000\u0000\u0279\u0262\u0001\u0000\u0000"+ - "\u0000\u0279\u0271\u0001\u0000\u0000\u0000\u027a[\u0001\u0000\u0000\u0000"+ - "\u027b\u027c\u0005b\u0000\u0000\u027c\u027d\u0005y\u0000\u0000\u027d]"+ - "\u0001\u0000\u0000\u0000\u027e\u027f\u0005a\u0000\u0000\u027f\u0280\u0005"+ - "n\u0000\u0000\u0280\u0281\u0005d\u0000\u0000\u0281_\u0001\u0000\u0000"+ - "\u0000\u0282\u0283\u0005a\u0000\u0000\u0283\u0284\u0005s\u0000\u0000\u0284"+ - "\u0285\u0005c\u0000\u0000\u0285a\u0001\u0000\u0000\u0000\u0286\u0287\u0005"+ - "=\u0000\u0000\u0287c\u0001\u0000\u0000\u0000\u0288\u0289\u0005,\u0000"+ - 
"\u0000\u0289e\u0001\u0000\u0000\u0000\u028a\u028b\u0005d\u0000\u0000\u028b"+ - "\u028c\u0005e\u0000\u0000\u028c\u028d\u0005s\u0000\u0000\u028d\u028e\u0005"+ - "c\u0000\u0000\u028eg\u0001\u0000\u0000\u0000\u028f\u0290\u0005.\u0000"+ - "\u0000\u0290i\u0001\u0000\u0000\u0000\u0291\u0292\u0005f\u0000\u0000\u0292"+ - "\u0293\u0005a\u0000\u0000\u0293\u0294\u0005l\u0000\u0000\u0294\u0295\u0005"+ - "s\u0000\u0000\u0295\u0296\u0005e\u0000\u0000\u0296k\u0001\u0000\u0000"+ - "\u0000\u0297\u0298\u0005f\u0000\u0000\u0298\u0299\u0005i\u0000\u0000\u0299"+ - "\u029a\u0005r\u0000\u0000\u029a\u029b\u0005s\u0000\u0000\u029b\u029c\u0005"+ - "t\u0000\u0000\u029cm\u0001\u0000\u0000\u0000\u029d\u029e\u0005l\u0000"+ - "\u0000\u029e\u029f\u0005a\u0000\u0000\u029f\u02a0\u0005s\u0000\u0000\u02a0"+ - "\u02a1\u0005t\u0000\u0000\u02a1o\u0001\u0000\u0000\u0000\u02a2\u02a3\u0005"+ - "(\u0000\u0000\u02a3q\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005i\u0000"+ - "\u0000\u02a5\u02a6\u0005n\u0000\u0000\u02a6s\u0001\u0000\u0000\u0000\u02a7"+ - "\u02a8\u0005i\u0000\u0000\u02a8\u02a9\u0005s\u0000\u0000\u02a9u\u0001"+ - "\u0000\u0000\u0000\u02aa\u02ab\u0005l\u0000\u0000\u02ab\u02ac\u0005i\u0000"+ - "\u0000\u02ac\u02ad\u0005k\u0000\u0000\u02ad\u02ae\u0005e\u0000\u0000\u02ae"+ - "w\u0001\u0000\u0000\u0000\u02af\u02b0\u0005n\u0000\u0000\u02b0\u02b1\u0005"+ - "o\u0000\u0000\u02b1\u02b2\u0005t\u0000\u0000\u02b2y\u0001\u0000\u0000"+ - "\u0000\u02b3\u02b4\u0005n\u0000\u0000\u02b4\u02b5\u0005u\u0000\u0000\u02b5"+ - "\u02b6\u0005l\u0000\u0000\u02b6\u02b7\u0005l\u0000\u0000\u02b7{\u0001"+ - "\u0000\u0000\u0000\u02b8\u02b9\u0005n\u0000\u0000\u02b9\u02ba\u0005u\u0000"+ - "\u0000\u02ba\u02bb\u0005l\u0000\u0000\u02bb\u02bc\u0005l\u0000\u0000\u02bc"+ - "\u02bd\u0005s\u0000\u0000\u02bd}\u0001\u0000\u0000\u0000\u02be\u02bf\u0005"+ - "o\u0000\u0000\u02bf\u02c0\u0005r\u0000\u0000\u02c0\u007f\u0001\u0000\u0000"+ - "\u0000\u02c1\u02c2\u0005?\u0000\u0000\u02c2\u0081\u0001\u0000\u0000\u0000"+ - 
"\u02c3\u02c4\u0005r\u0000\u0000\u02c4\u02c5\u0005l\u0000\u0000\u02c5\u02c6"+ - "\u0005i\u0000\u0000\u02c6\u02c7\u0005k\u0000\u0000\u02c7\u02c8\u0005e"+ - "\u0000\u0000\u02c8\u0083\u0001\u0000\u0000\u0000\u02c9\u02ca\u0005)\u0000"+ - "\u0000\u02ca\u0085\u0001\u0000\u0000\u0000\u02cb\u02cc\u0005t\u0000\u0000"+ - "\u02cc\u02cd\u0005r\u0000\u0000\u02cd\u02ce\u0005u\u0000\u0000\u02ce\u02cf"+ - "\u0005e\u0000\u0000\u02cf\u0087\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005"+ - "=\u0000\u0000\u02d1\u02d2\u0005=\u0000\u0000\u02d2\u0089\u0001\u0000\u0000"+ - "\u0000\u02d3\u02d4\u0005!\u0000\u0000\u02d4\u02d5\u0005=\u0000\u0000\u02d5"+ - "\u008b\u0001\u0000\u0000\u0000\u02d6\u02d7\u0005<\u0000\u0000\u02d7\u008d"+ - "\u0001\u0000\u0000\u0000\u02d8\u02d9\u0005<\u0000\u0000\u02d9\u02da\u0005"+ - "=\u0000\u0000\u02da\u008f\u0001\u0000\u0000\u0000\u02db\u02dc\u0005>\u0000"+ - "\u0000\u02dc\u0091\u0001\u0000\u0000\u0000\u02dd\u02de\u0005>\u0000\u0000"+ - "\u02de\u02df\u0005=\u0000\u0000\u02df\u0093\u0001\u0000\u0000\u0000\u02e0"+ - "\u02e1\u0005+\u0000\u0000\u02e1\u0095\u0001\u0000\u0000\u0000\u02e2\u02e3"+ - "\u0005-\u0000\u0000\u02e3\u0097\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005"+ - "*\u0000\u0000\u02e5\u0099\u0001\u0000\u0000\u0000\u02e6\u02e7\u0005/\u0000"+ - "\u0000\u02e7\u009b\u0001\u0000\u0000\u0000\u02e8\u02e9\u0005%\u0000\u0000"+ - "\u02e9\u009d\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005[\u0000\u0000\u02eb"+ - "\u02ec\u0001\u0000\u0000\u0000\u02ec\u02ed\u0006J\u0000\u0000\u02ed\u02ee"+ - "\u0006J\u0000\u0000\u02ee\u009f\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005"+ - "]\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f1\u02f2\u0006K\f"+ - "\u0000\u02f2\u02f3\u0006K\f\u0000\u02f3\u00a1\u0001\u0000\u0000\u0000"+ - "\u02f4\u02f8\u0003D\u001d\u0000\u02f5\u02f7\u0003T%\u0000\u02f6\u02f5"+ - "\u0001\u0000\u0000\u0000\u02f7\u02fa\u0001\u0000\u0000\u0000\u02f8\u02f6"+ - "\u0001\u0000\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u0305"+ - 
"\u0001\u0000\u0000\u0000\u02fa\u02f8\u0001\u0000\u0000\u0000\u02fb\u02fe"+ - "\u0003R$\u0000\u02fc\u02fe\u0003L!\u0000\u02fd\u02fb\u0001\u0000\u0000"+ - "\u0000\u02fd\u02fc\u0001\u0000\u0000\u0000\u02fe\u0300\u0001\u0000\u0000"+ - "\u0000\u02ff\u0301\u0003T%\u0000\u0300\u02ff\u0001\u0000\u0000\u0000\u0301"+ - "\u0302\u0001\u0000\u0000\u0000\u0302\u0300\u0001\u0000\u0000\u0000\u0302"+ - "\u0303\u0001\u0000\u0000\u0000\u0303\u0305\u0001\u0000\u0000\u0000\u0304"+ - "\u02f4\u0001\u0000\u0000\u0000\u0304\u02fd\u0001\u0000\u0000\u0000\u0305"+ - "\u00a3\u0001\u0000\u0000\u0000\u0306\u0308\u0003N\"\u0000\u0307\u0309"+ - "\u0003P#\u0000\u0308\u0307\u0001\u0000\u0000\u0000\u0309\u030a\u0001\u0000"+ - "\u0000\u0000\u030a\u0308\u0001\u0000\u0000\u0000\u030a\u030b\u0001\u0000"+ - "\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c\u030d\u0003N\""+ - "\u0000\u030d\u00a5\u0001\u0000\u0000\u0000\u030e\u030f\u00030\u0013\u0000"+ - "\u030f\u0310\u0001\u0000\u0000\u0000\u0310\u0311\u0006N\b\u0000\u0311"+ - "\u00a7\u0001\u0000\u0000\u0000\u0312\u0313\u00032\u0014\u0000\u0313\u0314"+ - "\u0001\u0000\u0000\u0000\u0314\u0315\u0006O\b\u0000\u0315\u00a9\u0001"+ - "\u0000\u0000\u0000\u0316\u0317\u00034\u0015\u0000\u0317\u0318\u0001\u0000"+ - "\u0000\u0000\u0318\u0319\u0006P\b\u0000\u0319\u00ab\u0001\u0000\u0000"+ - "\u0000\u031a\u031b\u0003@\u001b\u0000\u031b\u031c\u0001\u0000\u0000\u0000"+ - "\u031c\u031d\u0006Q\u000b\u0000\u031d\u031e\u0006Q\f\u0000\u031e\u00ad"+ - "\u0001\u0000\u0000\u0000\u031f\u0320\u0003\u009eJ\u0000\u0320\u0321\u0001"+ - "\u0000\u0000\u0000\u0321\u0322\u0006R\t\u0000\u0322\u0323\u0006R\u0004"+ - "\u0000\u0323\u0324\u0006R\u0004\u0000\u0324\u00af\u0001\u0000\u0000\u0000"+ - "\u0325\u0326\u0003\u00a0K\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327"+ - "\u0328\u0006S\r\u0000\u0328\u0329\u0006S\f\u0000\u0329\u032a\u0006S\f"+ - "\u0000\u032a\u00b1\u0001\u0000\u0000\u0000\u032b\u032c\u0003d-\u0000\u032c"+ - 
"\u032d\u0001\u0000\u0000\u0000\u032d\u032e\u0006T\u000e\u0000\u032e\u00b3"+ - "\u0001\u0000\u0000\u0000\u032f\u0330\u0003b,\u0000\u0330\u0331\u0001\u0000"+ - "\u0000\u0000\u0331\u0332\u0006U\u000f\u0000\u0332\u00b5\u0001\u0000\u0000"+ - "\u0000\u0333\u0334\u0005m\u0000\u0000\u0334\u0335\u0005e\u0000\u0000\u0335"+ - "\u0336\u0005t\u0000\u0000\u0336\u0337\u0005a\u0000\u0000\u0337\u0338\u0005"+ - "d\u0000\u0000\u0338\u0339\u0005a\u0000\u0000\u0339\u033a\u0005t\u0000"+ - "\u0000\u033a\u033b\u0005a\u0000\u0000\u033b\u00b7\u0001\u0000\u0000\u0000"+ - "\u033c\u0340\b\n\u0000\u0000\u033d\u033e\u0005/\u0000\u0000\u033e\u0340"+ - "\b\u000b\u0000\u0000\u033f\u033c\u0001\u0000\u0000\u0000\u033f\u033d\u0001"+ - "\u0000\u0000\u0000\u0340\u00b9\u0001\u0000\u0000\u0000\u0341\u0343\u0003"+ - "\u00b8W\u0000\u0342\u0341\u0001\u0000\u0000\u0000\u0343\u0344\u0001\u0000"+ - "\u0000\u0000\u0344\u0342\u0001\u0000\u0000\u0000\u0344\u0345\u0001\u0000"+ - "\u0000\u0000\u0345\u00bb\u0001\u0000\u0000\u0000\u0346\u0347\u0003\u00a4"+ - "M\u0000\u0347\u0348\u0001\u0000\u0000\u0000\u0348\u0349\u0006Y\u0010\u0000"+ - "\u0349\u00bd\u0001\u0000\u0000\u0000\u034a\u034b\u00030\u0013\u0000\u034b"+ - "\u034c\u0001\u0000\u0000\u0000\u034c\u034d\u0006Z\b\u0000\u034d\u00bf"+ - "\u0001\u0000\u0000\u0000\u034e\u034f\u00032\u0014\u0000\u034f\u0350\u0001"+ - "\u0000\u0000\u0000\u0350\u0351\u0006[\b\u0000\u0351\u00c1\u0001\u0000"+ - "\u0000\u0000\u0352\u0353\u00034\u0015\u0000\u0353\u0354\u0001\u0000\u0000"+ - "\u0000\u0354\u0355\u0006\\\b\u0000\u0355\u00c3\u0001\u0000\u0000\u0000"+ - "\u0356\u0357\u0003@\u001b\u0000\u0357\u0358\u0001\u0000\u0000\u0000\u0358"+ - "\u0359\u0006]\u000b\u0000\u0359\u035a\u0006]\f\u0000\u035a\u00c5\u0001"+ - "\u0000\u0000\u0000\u035b\u035c\u0003h/\u0000\u035c\u035d\u0001\u0000\u0000"+ - "\u0000\u035d\u035e\u0006^\u0011\u0000\u035e\u00c7\u0001\u0000\u0000\u0000"+ - "\u035f\u0360\u0003d-\u0000\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u0362"+ - 
"\u0006_\u000e\u0000\u0362\u00c9\u0001\u0000\u0000\u0000\u0363\u0368\u0003"+ - "D\u001d\u0000\u0364\u0368\u0003B\u001c\u0000\u0365\u0368\u0003R$\u0000"+ - "\u0366\u0368\u0003\u0098G\u0000\u0367\u0363\u0001\u0000\u0000\u0000\u0367"+ - "\u0364\u0001\u0000\u0000\u0000\u0367\u0365\u0001\u0000\u0000\u0000\u0367"+ - "\u0366\u0001\u0000\u0000\u0000\u0368\u00cb\u0001\u0000\u0000\u0000\u0369"+ - "\u036c\u0003D\u001d\u0000\u036a\u036c\u0003\u0098G\u0000\u036b\u0369\u0001"+ - "\u0000\u0000\u0000\u036b\u036a\u0001\u0000\u0000\u0000\u036c\u0370\u0001"+ - "\u0000\u0000\u0000\u036d\u036f\u0003\u00ca`\u0000\u036e\u036d\u0001\u0000"+ - "\u0000\u0000\u036f\u0372\u0001\u0000\u0000\u0000\u0370\u036e\u0001\u0000"+ - "\u0000\u0000\u0370\u0371\u0001\u0000\u0000\u0000\u0371\u037d\u0001\u0000"+ - "\u0000\u0000\u0372\u0370\u0001\u0000\u0000\u0000\u0373\u0376\u0003R$\u0000"+ - "\u0374\u0376\u0003L!\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375\u0374"+ - "\u0001\u0000\u0000\u0000\u0376\u0378\u0001\u0000\u0000\u0000\u0377\u0379"+ - "\u0003\u00ca`\u0000\u0378\u0377\u0001\u0000\u0000\u0000\u0379\u037a\u0001"+ - "\u0000\u0000\u0000\u037a\u0378\u0001\u0000\u0000\u0000\u037a\u037b\u0001"+ - "\u0000\u0000\u0000\u037b\u037d\u0001\u0000\u0000\u0000\u037c\u036b\u0001"+ - "\u0000\u0000\u0000\u037c\u0375\u0001\u0000\u0000\u0000\u037d\u00cd\u0001"+ - "\u0000\u0000\u0000\u037e\u037f\u0003\u00a4M\u0000\u037f\u0380\u0001\u0000"+ - "\u0000\u0000\u0380\u0381\u0006b\u0010\u0000\u0381\u00cf\u0001\u0000\u0000"+ - "\u0000\u0382\u0383\u00030\u0013\u0000\u0383\u0384\u0001\u0000\u0000\u0000"+ - "\u0384\u0385\u0006c\b\u0000\u0385\u00d1\u0001\u0000\u0000\u0000\u0386"+ - "\u0387\u00032\u0014\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388\u0389"+ - "\u0006d\b\u0000\u0389\u00d3\u0001\u0000\u0000\u0000\u038a\u038b\u0003"+ - "4\u0015\u0000\u038b\u038c\u0001\u0000\u0000\u0000\u038c\u038d\u0006e\b"+ - "\u0000\u038d\u00d5\u0001\u0000\u0000\u0000\u038e\u038f\u0003@\u001b\u0000"+ - 
"\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0391\u0006f\u000b\u0000\u0391"+ - "\u0392\u0006f\f\u0000\u0392\u00d7\u0001\u0000\u0000\u0000\u0393\u0394"+ - "\u0003b,\u0000\u0394\u0395\u0001\u0000\u0000\u0000\u0395\u0396\u0006g"+ - "\u000f\u0000\u0396\u00d9\u0001\u0000\u0000\u0000\u0397\u0398\u0003d-\u0000"+ - "\u0398\u0399\u0001\u0000\u0000\u0000\u0399\u039a\u0006h\u000e\u0000\u039a"+ - "\u00db\u0001\u0000\u0000\u0000\u039b\u039c\u0003h/\u0000\u039c\u039d\u0001"+ - "\u0000\u0000\u0000\u039d\u039e\u0006i\u0011\u0000\u039e\u00dd\u0001\u0000"+ - "\u0000\u0000\u039f\u03a0\u0005a\u0000\u0000\u03a0\u03a1\u0005s\u0000\u0000"+ - "\u03a1\u00df\u0001\u0000\u0000\u0000\u03a2\u03a3\u0003\u00a4M\u0000\u03a3"+ - "\u03a4\u0001\u0000\u0000\u0000\u03a4\u03a5\u0006k\u0010\u0000\u03a5\u00e1"+ - "\u0001\u0000\u0000\u0000\u03a6\u03a7\u0003\u00cca\u0000\u03a7\u03a8\u0001"+ - "\u0000\u0000\u0000\u03a8\u03a9\u0006l\u0012\u0000\u03a9\u00e3\u0001\u0000"+ - "\u0000\u0000\u03aa\u03ab\u00030\u0013\u0000\u03ab\u03ac\u0001\u0000\u0000"+ - "\u0000\u03ac\u03ad\u0006m\b\u0000\u03ad\u00e5\u0001\u0000\u0000\u0000"+ - "\u03ae\u03af\u00032\u0014\u0000\u03af\u03b0\u0001\u0000\u0000\u0000\u03b0"+ - "\u03b1\u0006n\b\u0000\u03b1\u00e7\u0001\u0000\u0000\u0000\u03b2\u03b3"+ - "\u00034\u0015\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u03b5\u0006"+ - "o\b\u0000\u03b5\u00e9\u0001\u0000\u0000\u0000\u03b6\u03b7\u0003@\u001b"+ - "\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006p\u000b\u0000"+ - "\u03b9\u03ba\u0006p\f\u0000\u03ba\u00eb\u0001\u0000\u0000\u0000\u03bb"+ - "\u03bc\u0005o\u0000\u0000\u03bc\u03bd\u0005n\u0000\u0000\u03bd\u03be\u0001"+ - "\u0000\u0000\u0000\u03be\u03bf\u0006q\u0013\u0000\u03bf\u00ed\u0001\u0000"+ - "\u0000\u0000\u03c0\u03c1\u0005w\u0000\u0000\u03c1\u03c2\u0005i\u0000\u0000"+ - "\u03c2\u03c3\u0005t\u0000\u0000\u03c3\u03c4\u0005h\u0000\u0000\u03c4\u03c5"+ - "\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006r\u0013\u0000\u03c6\u00ef\u0001"+ - 
"\u0000\u0000\u0000\u03c7\u03c8\u0003\u00baX\u0000\u03c8\u03c9\u0001\u0000"+ - "\u0000\u0000\u03c9\u03ca\u0006s\u0014\u0000\u03ca\u00f1\u0001\u0000\u0000"+ - "\u0000\u03cb\u03cc\u0003\u00a4M\u0000\u03cc\u03cd\u0001\u0000\u0000\u0000"+ - "\u03cd\u03ce\u0006t\u0010\u0000\u03ce\u00f3\u0001\u0000\u0000\u0000\u03cf"+ - "\u03d0\u00030\u0013\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2"+ - "\u0006u\b\u0000\u03d2\u00f5\u0001\u0000\u0000\u0000\u03d3\u03d4\u0003"+ - "2\u0014\u0000\u03d4\u03d5\u0001\u0000\u0000\u0000\u03d5\u03d6\u0006v\b"+ - "\u0000\u03d6\u00f7\u0001\u0000\u0000\u0000\u03d7\u03d8\u00034\u0015\u0000"+ - "\u03d8\u03d9\u0001\u0000\u0000\u0000\u03d9\u03da\u0006w\b\u0000\u03da"+ - "\u00f9\u0001\u0000\u0000\u0000\u03db\u03dc\u0003@\u001b\u0000\u03dc\u03dd"+ - "\u0001\u0000\u0000\u0000\u03dd\u03de\u0006x\u000b\u0000\u03de\u03df\u0006"+ - "x\f\u0000\u03df\u03e0\u0006x\f\u0000\u03e0\u00fb\u0001\u0000\u0000\u0000"+ - "\u03e1\u03e2\u0003b,\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3\u03e4"+ - "\u0006y\u000f\u0000\u03e4\u00fd\u0001\u0000\u0000\u0000\u03e5\u03e6\u0003"+ - "d-\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006z\u000e"+ - "\u0000\u03e8\u00ff\u0001\u0000\u0000\u0000\u03e9\u03ea\u0003h/\u0000\u03ea"+ - "\u03eb\u0001\u0000\u0000\u0000\u03eb\u03ec\u0006{\u0011\u0000\u03ec\u0101"+ - "\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00eer\u0000\u03ee\u03ef\u0001"+ - "\u0000\u0000\u0000\u03ef\u03f0\u0006|\u0015\u0000\u03f0\u0103\u0001\u0000"+ - "\u0000\u0000\u03f1\u03f2\u0003\u00cca\u0000\u03f2\u03f3\u0001\u0000\u0000"+ - "\u0000\u03f3\u03f4\u0006}\u0012\u0000\u03f4\u0105\u0001\u0000\u0000\u0000"+ - "\u03f5\u03f6\u0003\u00a4M\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7"+ - "\u03f8\u0006~\u0010\u0000\u03f8\u0107\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u00030\u0013\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006"+ - "\u007f\b\u0000\u03fc\u0109\u0001\u0000\u0000\u0000\u03fd\u03fe\u00032"+ - 
"\u0014\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006\u0080"+ - "\b\u0000\u0400\u010b\u0001\u0000\u0000\u0000\u0401\u0402\u00034\u0015"+ - "\u0000\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006\u0081\b\u0000"+ - "\u0404\u010d\u0001\u0000\u0000\u0000\u0405\u0406\u0003@\u001b\u0000\u0406"+ - "\u0407\u0001\u0000\u0000\u0000\u0407\u0408\u0006\u0082\u000b\u0000\u0408"+ - "\u0409\u0006\u0082\f\u0000\u0409\u010f\u0001\u0000\u0000\u0000\u040a\u040b"+ - "\u0003h/\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006\u0083"+ - "\u0011\u0000\u040d\u0111\u0001\u0000\u0000\u0000\u040e\u040f\u0003\u00a4"+ - "M\u0000\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006\u0084\u0010"+ - "\u0000\u0411\u0113\u0001\u0000\u0000\u0000\u0412\u0413\u0003\u00a2L\u0000"+ - "\u0413\u0414\u0001\u0000\u0000\u0000\u0414\u0415\u0006\u0085\u0016\u0000"+ - "\u0415\u0115\u0001\u0000\u0000\u0000\u0416\u0417\u00030\u0013\u0000\u0417"+ - "\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006\u0086\b\u0000\u0419\u0117"+ - "\u0001\u0000\u0000\u0000\u041a\u041b\u00032\u0014\u0000\u041b\u041c\u0001"+ - "\u0000\u0000\u0000\u041c\u041d\u0006\u0087\b\u0000\u041d\u0119\u0001\u0000"+ - "\u0000\u0000\u041e\u041f\u00034\u0015\u0000\u041f\u0420\u0001\u0000\u0000"+ - "\u0000\u0420\u0421\u0006\u0088\b\u0000\u0421\u011b\u0001\u0000\u0000\u0000"+ - "\u0422\u0423\u0003@\u001b\u0000\u0423\u0424\u0001\u0000\u0000\u0000\u0424"+ - "\u0425\u0006\u0089\u000b\u0000\u0425\u0426\u0006\u0089\f\u0000\u0426\u011d"+ - "\u0001\u0000\u0000\u0000\u0427\u0428\u0005i\u0000\u0000\u0428\u0429\u0005"+ - "n\u0000\u0000\u0429\u042a\u0005f\u0000\u0000\u042a\u042b\u0005o\u0000"+ - "\u0000\u042b\u011f\u0001\u0000\u0000\u0000\u042c\u042d\u0005f\u0000\u0000"+ - "\u042d\u042e\u0005u\u0000\u0000\u042e\u042f\u0005n\u0000\u0000\u042f\u0430"+ - "\u0005c\u0000\u0000\u0430\u0431\u0005t\u0000\u0000\u0431\u0432\u0005i"+ - "\u0000\u0000\u0432\u0433\u0005o\u0000\u0000\u0433\u0434\u0005n\u0000\u0000"+ - 
"\u0434\u0435\u0005s\u0000\u0000\u0435\u0121\u0001\u0000\u0000\u0000\u0436"+ - "\u0437\u00030\u0013\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u0439"+ - "\u0006\u008c\b\u0000\u0439\u0123\u0001\u0000\u0000\u0000\u043a\u043b\u0003"+ - "2\u0014\u0000\u043b\u043c\u0001\u0000\u0000\u0000\u043c\u043d\u0006\u008d"+ - "\b\u0000\u043d\u0125\u0001\u0000\u0000\u0000\u043e\u043f\u00034\u0015"+ - "\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440\u0441\u0006\u008e\b\u0000"+ - "\u0441\u0127\u0001\u0000\u0000\u00001\u0000\u0001\u0002\u0003\u0004\u0005"+ - "\u0006\u0007\b\t\u01c4\u01ce\u01d2\u01d5\u01de\u01e0\u01eb\u0214\u0219"+ - "\u0222\u0229\u022e\u0230\u023b\u0243\u0246\u0248\u024d\u0252\u0258\u025f"+ - "\u0264\u026a\u026d\u0275\u0279\u02f8\u02fd\u0302\u0304\u030a\u033f\u0344"+ - "\u0367\u036b\u0370\u0375\u037a\u037c\u0017\u0005\u0002\u0000\u0005\u0004"+ - "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\b\u0000"+ - "\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007?\u0000\u0005"+ - "\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007@\u0000\u0007\""+ - "\u0000\u0007!\u0000\u0007B\u0000\u0007$\u0000\u0007K\u0000\u0005\u0007"+ - "\u0000\u0007G\u0000\u0007T\u0000\u0007A\u0000"; + "A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001E\u0001"+ + "E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ + "J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001"+ + "L\u0001L\u0001M\u0001M\u0005M\u0311\bM\nM\fM\u0314\tM\u0001M\u0001M\u0003"+ + "M\u0318\bM\u0001M\u0004M\u031b\bM\u000bM\fM\u031c\u0003M\u031f\bM\u0001"+ + "N\u0001N\u0004N\u0323\bN\u000bN\fN\u0324\u0001N\u0001N\u0001O\u0001O\u0001"+ + "O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ + "R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001"+ + "T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001"+ + "V\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001"+ + 
"X\u0001X\u0001X\u0003X\u0356\bX\u0001Y\u0004Y\u0359\bY\u000bY\fY\u035a"+ + "\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001"+ + "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0003a\u037e\ba\u0001b\u0001b\u0003b\u0382\bb\u0001"+ + "b\u0005b\u0385\bb\nb\fb\u0388\tb\u0001b\u0001b\u0003b\u038c\bb\u0001b"+ + "\u0004b\u038f\bb\u000bb\fb\u0390\u0003b\u0393\bb\u0001c\u0001c\u0001c"+ + "\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001"+ + "f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001"+ + "h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001"+ + "j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ + "m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ + "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001"+ + "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ + "v\u0001v\u0001w\u0001w\u0003w\u03eb\bw\u0001w\u0005w\u03ee\bw\nw\fw\u03f1"+ + "\tw\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001"+ + "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ + "|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001"+ + "~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080"+ + "\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081"+ + "\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083"+ + "\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084"+ + "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086"+ + "\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087"+ + "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ + 
"\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a"+ + "\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c"+ + "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d"+ + "\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ + "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090"+ + "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ + "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ + "\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093"+ + "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094"+ + "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0096\u0001\u0096\u0004\u0096\u047a\b\u0096\u000b\u0096"+ + "\f\u0096\u047b\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098"+ + "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0002\u01f7\u0252\u0000\u009a\u000b\u0001\r\u0002\u000f\u0003"+ + "\u0011\u0004\u0013\u0005\u0015\u0006\u0017\u0007\u0019\b\u001b\t\u001d"+ + "\n\u001f\u000b!\f#\r%\u000e\'\u000f)\u0010+\u0011-\u0012/\u00131\u0014"+ + "3\u00155\u00167\u00009\u0000;\u0017=\u0018?\u0019A\u001aC\u0000E\u0000"+ + "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0000U\u0000W\u001bY\u001c"+ + "[\u001d]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ + "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ + ";\u0099<\u009b=\u009d>\u009f?\u00a1@\u00a3A\u00a5B\u00a7C\u00a9D\u00ab"+ + "E\u00adF\u00af\u0000\u00b1\u0000\u00b3\u0000\u00b5\u0000\u00b7\u0000\u00b9"+ + "G\u00bb\u0000\u00bdH\u00bf\u0000\u00c1I\u00c3J\u00c5K\u00c7\u0000\u00c9"+ + "\u0000\u00cb\u0000\u00cd\u0000\u00cfL\u00d1\u0000\u00d3\u0000\u00d5M\u00d7"+ + "N\u00d9O\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3P\u00e5"+ + 
"\u0000\u00e7\u0000\u00e9Q\u00ebR\u00edS\u00ef\u0000\u00f1\u0000\u00f3"+ + "T\u00f5U\u00f7\u0000\u00f9V\u00fb\u0000\u00fd\u0000\u00ffW\u0101X\u0103"+ + "Y\u0105\u0000\u0107\u0000\u0109\u0000\u010b\u0000\u010d\u0000\u010f\u0000"+ + "\u0111\u0000\u0113Z\u0115[\u0117\\\u0119\u0000\u011b\u0000\u011d\u0000"+ + "\u011f\u0000\u0121]\u0123^\u0125_\u0127\u0000\u0129`\u012ba\u012db\u012f"+ + "c\u0131d\u0133\u0000\u0135e\u0137f\u0139g\u013bh\u013di\u000b\u0000\u0001"+ + "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\r\u0006\u0000\t\n\r\r //["+ + "[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000"+ + "AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000"+ + "EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002"+ + "\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04a4\u0000\u000b"+ + "\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000\u0000\u0000\u0000\u000f\u0001"+ + "\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001"+ + "\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001"+ + "\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001"+ + "\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001"+ + "\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000"+ + "\u0000\u0000%\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000"+ + "\u0000)\u0001\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-"+ + "\u0001\u0000\u0000\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000"+ + "\u0000\u0000\u00003\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000"+ + "\u00017\u0001\u0000\u0000\u0000\u00019\u0001\u0000\u0000\u0000\u0001;"+ + "\u0001\u0000\u0000\u0000\u0001=\u0001\u0000\u0000\u0000\u0001?\u0001\u0000"+ + "\u0000\u0000\u0002A\u0001\u0000\u0000\u0000\u0002W\u0001\u0000\u0000\u0000"+ + "\u0002Y\u0001\u0000\u0000\u0000\u0002[\u0001\u0000\u0000\u0000\u0002]"+ + 
"\u0001\u0000\u0000\u0000\u0002_\u0001\u0000\u0000\u0000\u0002a\u0001\u0000"+ + "\u0000\u0000\u0002c\u0001\u0000\u0000\u0000\u0002e\u0001\u0000\u0000\u0000"+ + "\u0002g\u0001\u0000\u0000\u0000\u0002i\u0001\u0000\u0000\u0000\u0002k"+ + "\u0001\u0000\u0000\u0000\u0002m\u0001\u0000\u0000\u0000\u0002o\u0001\u0000"+ + "\u0000\u0000\u0002q\u0001\u0000\u0000\u0000\u0002s\u0001\u0000\u0000\u0000"+ + "\u0002u\u0001\u0000\u0000\u0000\u0002w\u0001\u0000\u0000\u0000\u0002y"+ + "\u0001\u0000\u0000\u0000\u0002{\u0001\u0000\u0000\u0000\u0002}\u0001\u0000"+ + "\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0002\u0081\u0001\u0000"+ + "\u0000\u0000\u0002\u0083\u0001\u0000\u0000\u0000\u0002\u0085\u0001\u0000"+ + "\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0002\u0089\u0001\u0000"+ + "\u0000\u0000\u0002\u008b\u0001\u0000\u0000\u0000\u0002\u008d\u0001\u0000"+ + "\u0000\u0000\u0002\u008f\u0001\u0000\u0000\u0000\u0002\u0091\u0001\u0000"+ + "\u0000\u0000\u0002\u0093\u0001\u0000\u0000\u0000\u0002\u0095\u0001\u0000"+ + "\u0000\u0000\u0002\u0097\u0001\u0000\u0000\u0000\u0002\u0099\u0001\u0000"+ + "\u0000\u0000\u0002\u009b\u0001\u0000\u0000\u0000\u0002\u009d\u0001\u0000"+ + "\u0000\u0000\u0002\u009f\u0001\u0000\u0000\u0000\u0002\u00a1\u0001\u0000"+ + "\u0000\u0000\u0002\u00a3\u0001\u0000\u0000\u0000\u0002\u00a5\u0001\u0000"+ + "\u0000\u0000\u0002\u00a7\u0001\u0000\u0000\u0000\u0002\u00a9\u0001\u0000"+ + "\u0000\u0000\u0002\u00ab\u0001\u0000\u0000\u0000\u0002\u00ad\u0001\u0000"+ + "\u0000\u0000\u0003\u00af\u0001\u0000\u0000\u0000\u0003\u00b1\u0001\u0000"+ + "\u0000\u0000\u0003\u00b3\u0001\u0000\u0000\u0000\u0003\u00b5\u0001\u0000"+ + "\u0000\u0000\u0003\u00b7\u0001\u0000\u0000\u0000\u0003\u00b9\u0001\u0000"+ + "\u0000\u0000\u0003\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf\u0001\u0000"+ + "\u0000\u0000\u0003\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000"+ + "\u0000\u0000\u0003\u00c5\u0001\u0000\u0000\u0000\u0004\u00c7\u0001\u0000"+ + 
"\u0000\u0000\u0004\u00c9\u0001\u0000\u0000\u0000\u0004\u00cb\u0001\u0000"+ + "\u0000\u0000\u0004\u00cf\u0001\u0000\u0000\u0000\u0004\u00d1\u0001\u0000"+ + "\u0000\u0000\u0004\u00d3\u0001\u0000\u0000\u0000\u0004\u00d5\u0001\u0000"+ + "\u0000\u0000\u0004\u00d7\u0001\u0000\u0000\u0000\u0004\u00d9\u0001\u0000"+ + "\u0000\u0000\u0005\u00db\u0001\u0000\u0000\u0000\u0005\u00dd\u0001\u0000"+ + "\u0000\u0000\u0005\u00df\u0001\u0000\u0000\u0000\u0005\u00e1\u0001\u0000"+ + "\u0000\u0000\u0005\u00e3\u0001\u0000\u0000\u0000\u0005\u00e5\u0001\u0000"+ + "\u0000\u0000\u0005\u00e7\u0001\u0000\u0000\u0000\u0005\u00e9\u0001\u0000"+ + "\u0000\u0000\u0005\u00eb\u0001\u0000\u0000\u0000\u0005\u00ed\u0001\u0000"+ + "\u0000\u0000\u0006\u00ef\u0001\u0000\u0000\u0000\u0006\u00f1\u0001\u0000"+ + "\u0000\u0000\u0006\u00f3\u0001\u0000\u0000\u0000\u0006\u00f5\u0001\u0000"+ + "\u0000\u0000\u0006\u00f9\u0001\u0000\u0000\u0000\u0006\u00fb\u0001\u0000"+ + "\u0000\u0000\u0006\u00fd\u0001\u0000\u0000\u0000\u0006\u00ff\u0001\u0000"+ + "\u0000\u0000\u0006\u0101\u0001\u0000\u0000\u0000\u0006\u0103\u0001\u0000"+ + "\u0000\u0000\u0007\u0105\u0001\u0000\u0000\u0000\u0007\u0107\u0001\u0000"+ + "\u0000\u0000\u0007\u0109\u0001\u0000\u0000\u0000\u0007\u010b\u0001\u0000"+ + "\u0000\u0000\u0007\u010d\u0001\u0000\u0000\u0000\u0007\u010f\u0001\u0000"+ + "\u0000\u0000\u0007\u0111\u0001\u0000\u0000\u0000\u0007\u0113\u0001\u0000"+ + "\u0000\u0000\u0007\u0115\u0001\u0000\u0000\u0000\u0007\u0117\u0001\u0000"+ + "\u0000\u0000\b\u0119\u0001\u0000\u0000\u0000\b\u011b\u0001\u0000\u0000"+ + "\u0000\b\u011d\u0001\u0000\u0000\u0000\b\u011f\u0001\u0000\u0000\u0000"+ + "\b\u0121\u0001\u0000\u0000\u0000\b\u0123\u0001\u0000\u0000\u0000\b\u0125"+ + "\u0001\u0000\u0000\u0000\t\u0127\u0001\u0000\u0000\u0000\t\u0129\u0001"+ + "\u0000\u0000\u0000\t\u012b\u0001\u0000\u0000\u0000\t\u012d\u0001\u0000"+ + "\u0000\u0000\t\u012f\u0001\u0000\u0000\u0000\t\u0131\u0001\u0000\u0000"+ + 
"\u0000\n\u0133\u0001\u0000\u0000\u0000\n\u0135\u0001\u0000\u0000\u0000"+ + "\n\u0137\u0001\u0000\u0000\u0000\n\u0139\u0001\u0000\u0000\u0000\n\u013b"+ + "\u0001\u0000\u0000\u0000\n\u013d\u0001\u0000\u0000\u0000\u000b\u013f\u0001"+ + "\u0000\u0000\u0000\r\u0149\u0001\u0000\u0000\u0000\u000f\u0150\u0001\u0000"+ + "\u0000\u0000\u0011\u0159\u0001\u0000\u0000\u0000\u0013\u0160\u0001\u0000"+ + "\u0000\u0000\u0015\u016a\u0001\u0000\u0000\u0000\u0017\u0171\u0001\u0000"+ + "\u0000\u0000\u0019\u0178\u0001\u0000\u0000\u0000\u001b\u0186\u0001\u0000"+ + "\u0000\u0000\u001d\u018d\u0001\u0000\u0000\u0000\u001f\u0195\u0001\u0000"+ + "\u0000\u0000!\u01a1\u0001\u0000\u0000\u0000#\u01ab\u0001\u0000\u0000\u0000"+ + "%\u01b4\u0001\u0000\u0000\u0000\'\u01ba\u0001\u0000\u0000\u0000)\u01c1"+ + "\u0001\u0000\u0000\u0000+\u01c8\u0001\u0000\u0000\u0000-\u01d0\u0001\u0000"+ + "\u0000\u0000/\u01d9\u0001\u0000\u0000\u00001\u01df\u0001\u0000\u0000\u0000"+ + "3\u01f0\u0001\u0000\u0000\u00005\u0200\u0001\u0000\u0000\u00007\u0206"+ + "\u0001\u0000\u0000\u00009\u020b\u0001\u0000\u0000\u0000;\u0210\u0001\u0000"+ + "\u0000\u0000=\u0214\u0001\u0000\u0000\u0000?\u0218\u0001\u0000\u0000\u0000"+ + "A\u021c\u0001\u0000\u0000\u0000C\u0220\u0001\u0000\u0000\u0000E\u0222"+ + "\u0001\u0000\u0000\u0000G\u0224\u0001\u0000\u0000\u0000I\u0227\u0001\u0000"+ + "\u0000\u0000K\u0229\u0001\u0000\u0000\u0000M\u0232\u0001\u0000\u0000\u0000"+ + "O\u0234\u0001\u0000\u0000\u0000Q\u0239\u0001\u0000\u0000\u0000S\u023b"+ + "\u0001\u0000\u0000\u0000U\u0240\u0001\u0000\u0000\u0000W\u025f\u0001\u0000"+ + "\u0000\u0000Y\u0262\u0001\u0000\u0000\u0000[\u0290\u0001\u0000\u0000\u0000"+ + "]\u0292\u0001\u0000\u0000\u0000_\u0295\u0001\u0000\u0000\u0000a\u0299"+ + "\u0001\u0000\u0000\u0000c\u029d\u0001\u0000\u0000\u0000e\u029f\u0001\u0000"+ + "\u0000\u0000g\u02a1\u0001\u0000\u0000\u0000i\u02a6\u0001\u0000\u0000\u0000"+ + "k\u02a8\u0001\u0000\u0000\u0000m\u02ae\u0001\u0000\u0000\u0000o\u02b4"+ + 
"\u0001\u0000\u0000\u0000q\u02b9\u0001\u0000\u0000\u0000s\u02bb\u0001\u0000"+ + "\u0000\u0000u\u02be\u0001\u0000\u0000\u0000w\u02c1\u0001\u0000\u0000\u0000"+ + "y\u02c6\u0001\u0000\u0000\u0000{\u02ca\u0001\u0000\u0000\u0000}\u02cf"+ + "\u0001\u0000\u0000\u0000\u007f\u02d5\u0001\u0000\u0000\u0000\u0081\u02d8"+ + "\u0001\u0000\u0000\u0000\u0083\u02da\u0001\u0000\u0000\u0000\u0085\u02e0"+ + "\u0001\u0000\u0000\u0000\u0087\u02e2\u0001\u0000\u0000\u0000\u0089\u02e7"+ + "\u0001\u0000\u0000\u0000\u008b\u02ea\u0001\u0000\u0000\u0000\u008d\u02ed"+ + "\u0001\u0000\u0000\u0000\u008f\u02f0\u0001\u0000\u0000\u0000\u0091\u02f2"+ + "\u0001\u0000\u0000\u0000\u0093\u02f5\u0001\u0000\u0000\u0000\u0095\u02f7"+ + "\u0001\u0000\u0000\u0000\u0097\u02fa\u0001\u0000\u0000\u0000\u0099\u02fc"+ + "\u0001\u0000\u0000\u0000\u009b\u02fe\u0001\u0000\u0000\u0000\u009d\u0300"+ + "\u0001\u0000\u0000\u0000\u009f\u0302\u0001\u0000\u0000\u0000\u00a1\u0304"+ + "\u0001\u0000\u0000\u0000\u00a3\u0309\u0001\u0000\u0000\u0000\u00a5\u031e"+ + "\u0001\u0000\u0000\u0000\u00a7\u0320\u0001\u0000\u0000\u0000\u00a9\u0328"+ + "\u0001\u0000\u0000\u0000\u00ab\u032c\u0001\u0000\u0000\u0000\u00ad\u0330"+ + "\u0001\u0000\u0000\u0000\u00af\u0334\u0001\u0000\u0000\u0000\u00b1\u0339"+ + "\u0001\u0000\u0000\u0000\u00b3\u033d\u0001\u0000\u0000\u0000\u00b5\u0341"+ + "\u0001\u0000\u0000\u0000\u00b7\u0345\u0001\u0000\u0000\u0000\u00b9\u0349"+ + "\u0001\u0000\u0000\u0000\u00bb\u0355\u0001\u0000\u0000\u0000\u00bd\u0358"+ + "\u0001\u0000\u0000\u0000\u00bf\u035c\u0001\u0000\u0000\u0000\u00c1\u0360"+ + "\u0001\u0000\u0000\u0000\u00c3\u0364\u0001\u0000\u0000\u0000\u00c5\u0368"+ + "\u0001\u0000\u0000\u0000\u00c7\u036c\u0001\u0000\u0000\u0000\u00c9\u0371"+ + "\u0001\u0000\u0000\u0000\u00cb\u0375\u0001\u0000\u0000\u0000\u00cd\u037d"+ + "\u0001\u0000\u0000\u0000\u00cf\u0392\u0001\u0000\u0000\u0000\u00d1\u0394"+ + "\u0001\u0000\u0000\u0000\u00d3\u0398\u0001\u0000\u0000\u0000\u00d5\u039c"+ + 
"\u0001\u0000\u0000\u0000\u00d7\u03a0\u0001\u0000\u0000\u0000\u00d9\u03a4"+ + "\u0001\u0000\u0000\u0000\u00db\u03a8\u0001\u0000\u0000\u0000\u00dd\u03ad"+ + "\u0001\u0000\u0000\u0000\u00df\u03b1\u0001\u0000\u0000\u0000\u00e1\u03b5"+ + "\u0001\u0000\u0000\u0000\u00e3\u03b9\u0001\u0000\u0000\u0000\u00e5\u03bc"+ + "\u0001\u0000\u0000\u0000\u00e7\u03c0\u0001\u0000\u0000\u0000\u00e9\u03c4"+ + "\u0001\u0000\u0000\u0000\u00eb\u03c8\u0001\u0000\u0000\u0000\u00ed\u03cc"+ + "\u0001\u0000\u0000\u0000\u00ef\u03d0\u0001\u0000\u0000\u0000\u00f1\u03d5"+ + "\u0001\u0000\u0000\u0000\u00f3\u03da\u0001\u0000\u0000\u0000\u00f5\u03df"+ + "\u0001\u0000\u0000\u0000\u00f7\u03e6\u0001\u0000\u0000\u0000\u00f9\u03ea"+ + "\u0001\u0000\u0000\u0000\u00fb\u03f2\u0001\u0000\u0000\u0000\u00fd\u03f6"+ + "\u0001\u0000\u0000\u0000\u00ff\u03fa\u0001\u0000\u0000\u0000\u0101\u03fe"+ + "\u0001\u0000\u0000\u0000\u0103\u0402\u0001\u0000\u0000\u0000\u0105\u0406"+ + "\u0001\u0000\u0000\u0000\u0107\u040c\u0001\u0000\u0000\u0000\u0109\u0410"+ + "\u0001\u0000\u0000\u0000\u010b\u0414\u0001\u0000\u0000\u0000\u010d\u0418"+ + "\u0001\u0000\u0000\u0000\u010f\u041c\u0001\u0000\u0000\u0000\u0111\u0420"+ + "\u0001\u0000\u0000\u0000\u0113\u0424\u0001\u0000\u0000\u0000\u0115\u0428"+ + "\u0001\u0000\u0000\u0000\u0117\u042c\u0001\u0000\u0000\u0000\u0119\u0430"+ + "\u0001\u0000\u0000\u0000\u011b\u0435\u0001\u0000\u0000\u0000\u011d\u0439"+ + "\u0001\u0000\u0000\u0000\u011f\u043d\u0001\u0000\u0000\u0000\u0121\u0441"+ + "\u0001\u0000\u0000\u0000\u0123\u0445\u0001\u0000\u0000\u0000\u0125\u0449"+ + "\u0001\u0000\u0000\u0000\u0127\u044d\u0001\u0000\u0000\u0000\u0129\u0452"+ + "\u0001\u0000\u0000\u0000\u012b\u0457\u0001\u0000\u0000\u0000\u012d\u0461"+ + "\u0001\u0000\u0000\u0000\u012f\u0465\u0001\u0000\u0000\u0000\u0131\u0469"+ + "\u0001\u0000\u0000\u0000\u0133\u046d\u0001\u0000\u0000\u0000\u0135\u0472"+ + "\u0001\u0000\u0000\u0000\u0137\u0479\u0001\u0000\u0000\u0000\u0139\u047d"+ + 
"\u0001\u0000\u0000\u0000\u013b\u0481\u0001\u0000\u0000\u0000\u013d\u0485"+ + "\u0001\u0000\u0000\u0000\u013f\u0140\u0005d\u0000\u0000\u0140\u0141\u0005"+ + "i\u0000\u0000\u0141\u0142\u0005s\u0000\u0000\u0142\u0143\u0005s\u0000"+ + "\u0000\u0143\u0144\u0005e\u0000\u0000\u0144\u0145\u0005c\u0000\u0000\u0145"+ + "\u0146\u0005t\u0000\u0000\u0146\u0147\u0001\u0000\u0000\u0000\u0147\u0148"+ + "\u0006\u0000\u0000\u0000\u0148\f\u0001\u0000\u0000\u0000\u0149\u014a\u0005"+ + "d\u0000\u0000\u014a\u014b\u0005r\u0000\u0000\u014b\u014c\u0005o\u0000"+ + "\u0000\u014c\u014d\u0005p\u0000\u0000\u014d\u014e\u0001\u0000\u0000\u0000"+ + "\u014e\u014f\u0006\u0001\u0001\u0000\u014f\u000e\u0001\u0000\u0000\u0000"+ + "\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005n\u0000\u0000\u0152\u0153"+ + "\u0005r\u0000\u0000\u0153\u0154\u0005i\u0000\u0000\u0154\u0155\u0005c"+ + "\u0000\u0000\u0155\u0156\u0005h\u0000\u0000\u0156\u0157\u0001\u0000\u0000"+ + "\u0000\u0157\u0158\u0006\u0002\u0002\u0000\u0158\u0010\u0001\u0000\u0000"+ + "\u0000\u0159\u015a\u0005e\u0000\u0000\u015a\u015b\u0005v\u0000\u0000\u015b"+ + "\u015c\u0005a\u0000\u0000\u015c\u015d\u0005l\u0000\u0000\u015d\u015e\u0001"+ + "\u0000\u0000\u0000\u015e\u015f\u0006\u0003\u0000\u0000\u015f\u0012\u0001"+ + "\u0000\u0000\u0000\u0160\u0161\u0005e\u0000\u0000\u0161\u0162\u0005x\u0000"+ + "\u0000\u0162\u0163\u0005p\u0000\u0000\u0163\u0164\u0005l\u0000\u0000\u0164"+ + "\u0165\u0005a\u0000\u0000\u0165\u0166\u0005i\u0000\u0000\u0166\u0167\u0005"+ + "n\u0000\u0000\u0167\u0168\u0001\u0000\u0000\u0000\u0168\u0169\u0006\u0004"+ + "\u0003\u0000\u0169\u0014\u0001\u0000\u0000\u0000\u016a\u016b\u0005f\u0000"+ + "\u0000\u016b\u016c\u0005r\u0000\u0000\u016c\u016d\u0005o\u0000\u0000\u016d"+ + "\u016e\u0005m\u0000\u0000\u016e\u016f\u0001\u0000\u0000\u0000\u016f\u0170"+ + "\u0006\u0005\u0004\u0000\u0170\u0016\u0001\u0000\u0000\u0000\u0171\u0172"+ + "\u0005g\u0000\u0000\u0172\u0173\u0005r\u0000\u0000\u0173\u0174\u0005o"+ + 
"\u0000\u0000\u0174\u0175\u0005k\u0000\u0000\u0175\u0176\u0001\u0000\u0000"+ + "\u0000\u0176\u0177\u0006\u0006\u0000\u0000\u0177\u0018\u0001\u0000\u0000"+ + "\u0000\u0178\u0179\u0005i\u0000\u0000\u0179\u017a\u0005n\u0000\u0000\u017a"+ + "\u017b\u0005l\u0000\u0000\u017b\u017c\u0005i\u0000\u0000\u017c\u017d\u0005"+ + "n\u0000\u0000\u017d\u017e\u0005e\u0000\u0000\u017e\u017f\u0005s\u0000"+ + "\u0000\u017f\u0180\u0005t\u0000\u0000\u0180\u0181\u0005a\u0000\u0000\u0181"+ + "\u0182\u0005t\u0000\u0000\u0182\u0183\u0005s\u0000\u0000\u0183\u0184\u0001"+ + "\u0000\u0000\u0000\u0184\u0185\u0006\u0007\u0000\u0000\u0185\u001a\u0001"+ + "\u0000\u0000\u0000\u0186\u0187\u0005k\u0000\u0000\u0187\u0188\u0005e\u0000"+ + "\u0000\u0188\u0189\u0005e\u0000\u0000\u0189\u018a\u0005p\u0000\u0000\u018a"+ + "\u018b\u0001\u0000\u0000\u0000\u018b\u018c\u0006\b\u0001\u0000\u018c\u001c"+ + "\u0001\u0000\u0000\u0000\u018d\u018e\u0005l\u0000\u0000\u018e\u018f\u0005"+ + "i\u0000\u0000\u018f\u0190\u0005m\u0000\u0000\u0190\u0191\u0005i\u0000"+ + "\u0000\u0191\u0192\u0005t\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000"+ + "\u0193\u0194\u0006\t\u0000\u0000\u0194\u001e\u0001\u0000\u0000\u0000\u0195"+ + "\u0196\u0005m\u0000\u0000\u0196\u0197\u0005v\u0000\u0000\u0197\u0198\u0005"+ + "_\u0000\u0000\u0198\u0199\u0005e\u0000\u0000\u0199\u019a\u0005x\u0000"+ + "\u0000\u019a\u019b\u0005p\u0000\u0000\u019b\u019c\u0005a\u0000\u0000\u019c"+ + "\u019d\u0005n\u0000\u0000\u019d\u019e\u0005d\u0000\u0000\u019e\u019f\u0001"+ + "\u0000\u0000\u0000\u019f\u01a0\u0006\n\u0005\u0000\u01a0 \u0001\u0000"+ + "\u0000\u0000\u01a1\u01a2\u0005p\u0000\u0000\u01a2\u01a3\u0005r\u0000\u0000"+ + "\u01a3\u01a4\u0005o\u0000\u0000\u01a4\u01a5\u0005j\u0000\u0000\u01a5\u01a6"+ + "\u0005e\u0000\u0000\u01a6\u01a7\u0005c\u0000\u0000\u01a7\u01a8\u0005t"+ + "\u0000\u0000\u01a8\u01a9\u0001\u0000\u0000\u0000\u01a9\u01aa\u0006\u000b"+ + "\u0001\u0000\u01aa\"\u0001\u0000\u0000\u0000\u01ab\u01ac\u0005r\u0000"+ + 
"\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae\u0005n\u0000\u0000\u01ae"+ + "\u01af\u0005a\u0000\u0000\u01af\u01b0\u0005m\u0000\u0000\u01b0\u01b1\u0005"+ + "e\u0000\u0000\u01b1\u01b2\u0001\u0000\u0000\u0000\u01b2\u01b3\u0006\f"+ + "\u0006\u0000\u01b3$\u0001\u0000\u0000\u0000\u01b4\u01b5\u0005r\u0000\u0000"+ + "\u01b5\u01b6\u0005o\u0000\u0000\u01b6\u01b7\u0005w\u0000\u0000\u01b7\u01b8"+ + "\u0001\u0000\u0000\u0000\u01b8\u01b9\u0006\r\u0000\u0000\u01b9&\u0001"+ + "\u0000\u0000\u0000\u01ba\u01bb\u0005s\u0000\u0000\u01bb\u01bc\u0005h\u0000"+ + "\u0000\u01bc\u01bd\u0005o\u0000\u0000\u01bd\u01be\u0005w\u0000\u0000\u01be"+ + "\u01bf\u0001\u0000\u0000\u0000\u01bf\u01c0\u0006\u000e\u0007\u0000\u01c0"+ + "(\u0001\u0000\u0000\u0000\u01c1\u01c2\u0005s\u0000\u0000\u01c2\u01c3\u0005"+ + "o\u0000\u0000\u01c3\u01c4\u0005r\u0000\u0000\u01c4\u01c5\u0005t\u0000"+ + "\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6\u01c7\u0006\u000f\u0000"+ + "\u0000\u01c7*\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005s\u0000\u0000\u01c9"+ + "\u01ca\u0005t\u0000\u0000\u01ca\u01cb\u0005a\u0000\u0000\u01cb\u01cc\u0005"+ + "t\u0000\u0000\u01cc\u01cd\u0005s\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000"+ + "\u0000\u01ce\u01cf\u0006\u0010\u0000\u0000\u01cf,\u0001\u0000\u0000\u0000"+ + "\u01d0\u01d1\u0005w\u0000\u0000\u01d1\u01d2\u0005h\u0000\u0000\u01d2\u01d3"+ + "\u0005e\u0000\u0000\u01d3\u01d4\u0005r\u0000\u0000\u01d4\u01d5\u0005e"+ + "\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0011"+ + "\u0000\u0000\u01d7.\u0001\u0000\u0000\u0000\u01d8\u01da\b\u0000\u0000"+ + "\u0000\u01d9\u01d8\u0001\u0000\u0000\u0000\u01da\u01db\u0001\u0000\u0000"+ + "\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01db\u01dc\u0001\u0000\u0000"+ + "\u0000\u01dc\u01dd\u0001\u0000\u0000\u0000\u01dd\u01de\u0006\u0012\u0000"+ + "\u0000\u01de0\u0001\u0000\u0000\u0000\u01df\u01e0\u0005/\u0000\u0000\u01e0"+ + "\u01e1\u0005/\u0000\u0000\u01e1\u01e5\u0001\u0000\u0000\u0000\u01e2\u01e4"+ + 
"\b\u0001\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e7\u0001"+ + "\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e5\u01e6\u0001"+ + "\u0000\u0000\u0000\u01e6\u01e9\u0001\u0000\u0000\u0000\u01e7\u01e5\u0001"+ + "\u0000\u0000\u0000\u01e8\u01ea\u0005\r\u0000\u0000\u01e9\u01e8\u0001\u0000"+ + "\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ec\u0001\u0000"+ + "\u0000\u0000\u01eb\u01ed\u0005\n\u0000\u0000\u01ec\u01eb\u0001\u0000\u0000"+ + "\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000"+ + "\u0000\u01ee\u01ef\u0006\u0013\b\u0000\u01ef2\u0001\u0000\u0000\u0000"+ + "\u01f0\u01f1\u0005/\u0000\u0000\u01f1\u01f2\u0005*\u0000\u0000\u01f2\u01f7"+ + "\u0001\u0000\u0000\u0000\u01f3\u01f6\u00033\u0014\u0000\u01f4\u01f6\t"+ + "\u0000\u0000\u0000\u01f5\u01f3\u0001\u0000\u0000\u0000\u01f5\u01f4\u0001"+ + "\u0000\u0000\u0000\u01f6\u01f9\u0001\u0000\u0000\u0000\u01f7\u01f8\u0001"+ + "\u0000\u0000\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fa\u0001"+ + "\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005"+ + "*\u0000\u0000\u01fb\u01fc\u0005/\u0000\u0000\u01fc\u01fd\u0001\u0000\u0000"+ + "\u0000\u01fd\u01fe\u0006\u0014\b\u0000\u01fe4\u0001\u0000\u0000\u0000"+ + "\u01ff\u0201\u0007\u0002\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000"+ + "\u0201\u0202\u0001\u0000\u0000\u0000\u0202\u0200\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000"+ + "\u0204\u0205\u0006\u0015\b\u0000\u02056\u0001\u0000\u0000\u0000\u0206"+ + "\u0207\u0003\u00a1K\u0000\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209"+ + "\u0006\u0016\t\u0000\u0209\u020a\u0006\u0016\n\u0000\u020a8\u0001\u0000"+ + "\u0000\u0000\u020b\u020c\u0003A\u001b\u0000\u020c\u020d\u0001\u0000\u0000"+ + "\u0000\u020d\u020e\u0006\u0017\u000b\u0000\u020e\u020f\u0006\u0017\f\u0000"+ + "\u020f:\u0001\u0000\u0000\u0000\u0210\u0211\u00035\u0015\u0000\u0211\u0212"+ + 
"\u0001\u0000\u0000\u0000\u0212\u0213\u0006\u0018\b\u0000\u0213<\u0001"+ + "\u0000\u0000\u0000\u0214\u0215\u00031\u0013\u0000\u0215\u0216\u0001\u0000"+ + "\u0000\u0000\u0216\u0217\u0006\u0019\b\u0000\u0217>\u0001\u0000\u0000"+ + "\u0000\u0218\u0219\u00033\u0014\u0000\u0219\u021a\u0001\u0000\u0000\u0000"+ + "\u021a\u021b\u0006\u001a\b\u0000\u021b@\u0001\u0000\u0000\u0000\u021c"+ + "\u021d\u0005|\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u021f"+ + "\u0006\u001b\f\u0000\u021fB\u0001\u0000\u0000\u0000\u0220\u0221\u0007"+ + "\u0003\u0000\u0000\u0221D\u0001\u0000\u0000\u0000\u0222\u0223\u0007\u0004"+ + "\u0000\u0000\u0223F\u0001\u0000\u0000\u0000\u0224\u0225\u0005\\\u0000"+ + "\u0000\u0225\u0226\u0007\u0005\u0000\u0000\u0226H\u0001\u0000\u0000\u0000"+ + "\u0227\u0228\b\u0006\u0000\u0000\u0228J\u0001\u0000\u0000\u0000\u0229"+ + "\u022b\u0007\u0007\u0000\u0000\u022a\u022c\u0007\b\u0000\u0000\u022b\u022a"+ + "\u0001\u0000\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c\u022e"+ + "\u0001\u0000\u0000\u0000\u022d\u022f\u0003C\u001c\u0000\u022e\u022d\u0001"+ + "\u0000\u0000\u0000\u022f\u0230\u0001\u0000\u0000\u0000\u0230\u022e\u0001"+ + "\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231L\u0001\u0000"+ + "\u0000\u0000\u0232\u0233\u0005@\u0000\u0000\u0233N\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0005`\u0000\u0000\u0235P\u0001\u0000\u0000\u0000\u0236\u023a"+ + "\b\t\u0000\u0000\u0237\u0238\u0005`\u0000\u0000\u0238\u023a\u0005`\u0000"+ + "\u0000\u0239\u0236\u0001\u0000\u0000\u0000\u0239\u0237\u0001\u0000\u0000"+ + "\u0000\u023aR\u0001\u0000\u0000\u0000\u023b\u023c\u0005_\u0000\u0000\u023c"+ + "T\u0001\u0000\u0000\u0000\u023d\u0241\u0003E\u001d\u0000\u023e\u0241\u0003"+ + "C\u001c\u0000\u023f\u0241\u0003S$\u0000\u0240\u023d\u0001\u0000\u0000"+ + "\u0000\u0240\u023e\u0001\u0000\u0000\u0000\u0240\u023f\u0001\u0000\u0000"+ + "\u0000\u0241V\u0001\u0000\u0000\u0000\u0242\u0247\u0005\"\u0000\u0000"+ + 
"\u0243\u0246\u0003G\u001e\u0000\u0244\u0246\u0003I\u001f\u0000\u0245\u0243"+ + "\u0001\u0000\u0000\u0000\u0245\u0244\u0001\u0000\u0000\u0000\u0246\u0249"+ + "\u0001\u0000\u0000\u0000\u0247\u0245\u0001\u0000\u0000\u0000\u0247\u0248"+ + "\u0001\u0000\u0000\u0000\u0248\u024a\u0001\u0000\u0000\u0000\u0249\u0247"+ + "\u0001\u0000\u0000\u0000\u024a\u0260\u0005\"\u0000\u0000\u024b\u024c\u0005"+ + "\"\u0000\u0000\u024c\u024d\u0005\"\u0000\u0000\u024d\u024e\u0005\"\u0000"+ + "\u0000\u024e\u0252\u0001\u0000\u0000\u0000\u024f\u0251\b\u0001\u0000\u0000"+ + "\u0250\u024f\u0001\u0000\u0000\u0000\u0251\u0254\u0001\u0000\u0000\u0000"+ + "\u0252\u0253\u0001\u0000\u0000\u0000\u0252\u0250\u0001\u0000\u0000\u0000"+ + "\u0253\u0255\u0001\u0000\u0000\u0000\u0254\u0252\u0001\u0000\u0000\u0000"+ + "\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000\u0000\u0257"+ + "\u0258\u0005\"\u0000\u0000\u0258\u025a\u0001\u0000\u0000\u0000\u0259\u025b"+ + "\u0005\"\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025a\u025b\u0001"+ + "\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025e\u0005"+ + "\"\u0000\u0000\u025d\u025c\u0001\u0000\u0000\u0000\u025d\u025e\u0001\u0000"+ + "\u0000\u0000\u025e\u0260\u0001\u0000\u0000\u0000\u025f\u0242\u0001\u0000"+ + "\u0000\u0000\u025f\u024b\u0001\u0000\u0000\u0000\u0260X\u0001\u0000\u0000"+ + "\u0000\u0261\u0263\u0003C\u001c\u0000\u0262\u0261\u0001\u0000\u0000\u0000"+ + "\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0262\u0001\u0000\u0000\u0000"+ + "\u0264\u0265\u0001\u0000\u0000\u0000\u0265Z\u0001\u0000\u0000\u0000\u0266"+ + "\u0268\u0003C\u001c\u0000\u0267\u0266\u0001\u0000\u0000\u0000\u0268\u0269"+ + "\u0001\u0000\u0000\u0000\u0269\u0267\u0001\u0000\u0000\u0000\u0269\u026a"+ + "\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000\u0000\u0000\u026b\u026f"+ + "\u0003i/\u0000\u026c\u026e\u0003C\u001c\u0000\u026d\u026c\u0001\u0000"+ + "\u0000\u0000\u026e\u0271\u0001\u0000\u0000\u0000\u026f\u026d\u0001\u0000"+ + 
"\u0000\u0000\u026f\u0270\u0001\u0000\u0000\u0000\u0270\u0291\u0001\u0000"+ + "\u0000\u0000\u0271\u026f\u0001\u0000\u0000\u0000\u0272\u0274\u0003i/\u0000"+ + "\u0273\u0275\u0003C\u001c\u0000\u0274\u0273\u0001\u0000\u0000\u0000\u0275"+ + "\u0276\u0001\u0000\u0000\u0000\u0276\u0274\u0001\u0000\u0000\u0000\u0276"+ + "\u0277\u0001\u0000\u0000\u0000\u0277\u0291\u0001\u0000\u0000\u0000\u0278"+ + "\u027a\u0003C\u001c\u0000\u0279\u0278\u0001\u0000\u0000\u0000\u027a\u027b"+ + "\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000\u0000\u027b\u027c"+ + "\u0001\u0000\u0000\u0000\u027c\u0284\u0001\u0000\u0000\u0000\u027d\u0281"+ + "\u0003i/\u0000\u027e\u0280\u0003C\u001c\u0000\u027f\u027e\u0001\u0000"+ + "\u0000\u0000\u0280\u0283\u0001\u0000\u0000\u0000\u0281\u027f\u0001\u0000"+ + "\u0000\u0000\u0281\u0282\u0001\u0000\u0000\u0000\u0282\u0285\u0001\u0000"+ + "\u0000\u0000\u0283\u0281\u0001\u0000\u0000\u0000\u0284\u027d\u0001\u0000"+ + "\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000"+ + "\u0000\u0000\u0286\u0287\u0003K \u0000\u0287\u0291\u0001\u0000\u0000\u0000"+ + "\u0288\u028a\u0003i/\u0000\u0289\u028b\u0003C\u001c\u0000\u028a\u0289"+ + "\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000\u0000\u0000\u028c\u028a"+ + "\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000\u0000\u028d\u028e"+ + "\u0001\u0000\u0000\u0000\u028e\u028f\u0003K \u0000\u028f\u0291\u0001\u0000"+ + "\u0000\u0000\u0290\u0267\u0001\u0000\u0000\u0000\u0290\u0272\u0001\u0000"+ + "\u0000\u0000\u0290\u0279\u0001\u0000\u0000\u0000\u0290\u0288\u0001\u0000"+ + "\u0000\u0000\u0291\\\u0001\u0000\u0000\u0000\u0292\u0293\u0005b\u0000"+ + "\u0000\u0293\u0294\u0005y\u0000\u0000\u0294^\u0001\u0000\u0000\u0000\u0295"+ + "\u0296\u0005a\u0000\u0000\u0296\u0297\u0005n\u0000\u0000\u0297\u0298\u0005"+ + "d\u0000\u0000\u0298`\u0001\u0000\u0000\u0000\u0299\u029a\u0005a\u0000"+ + "\u0000\u029a\u029b\u0005s\u0000\u0000\u029b\u029c\u0005c\u0000\u0000\u029c"+ + 
"b\u0001\u0000\u0000\u0000\u029d\u029e\u0005=\u0000\u0000\u029ed\u0001"+ + "\u0000\u0000\u0000\u029f\u02a0\u0005,\u0000\u0000\u02a0f\u0001\u0000\u0000"+ + "\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2\u02a3\u0005e\u0000\u0000\u02a3"+ + "\u02a4\u0005s\u0000\u0000\u02a4\u02a5\u0005c\u0000\u0000\u02a5h\u0001"+ + "\u0000\u0000\u0000\u02a6\u02a7\u0005.\u0000\u0000\u02a7j\u0001\u0000\u0000"+ + "\u0000\u02a8\u02a9\u0005f\u0000\u0000\u02a9\u02aa\u0005a\u0000\u0000\u02aa"+ + "\u02ab\u0005l\u0000\u0000\u02ab\u02ac\u0005s\u0000\u0000\u02ac\u02ad\u0005"+ + "e\u0000\u0000\u02adl\u0001\u0000\u0000\u0000\u02ae\u02af\u0005f\u0000"+ + "\u0000\u02af\u02b0\u0005i\u0000\u0000\u02b0\u02b1\u0005r\u0000\u0000\u02b1"+ + "\u02b2\u0005s\u0000\u0000\u02b2\u02b3\u0005t\u0000\u0000\u02b3n\u0001"+ + "\u0000\u0000\u0000\u02b4\u02b5\u0005l\u0000\u0000\u02b5\u02b6\u0005a\u0000"+ + "\u0000\u02b6\u02b7\u0005s\u0000\u0000\u02b7\u02b8\u0005t\u0000\u0000\u02b8"+ + "p\u0001\u0000\u0000\u0000\u02b9\u02ba\u0005(\u0000\u0000\u02bar\u0001"+ + "\u0000\u0000\u0000\u02bb\u02bc\u0005i\u0000\u0000\u02bc\u02bd\u0005n\u0000"+ + "\u0000\u02bdt\u0001\u0000\u0000\u0000\u02be\u02bf\u0005i\u0000\u0000\u02bf"+ + "\u02c0\u0005s\u0000\u0000\u02c0v\u0001\u0000\u0000\u0000\u02c1\u02c2\u0005"+ + "l\u0000\u0000\u02c2\u02c3\u0005i\u0000\u0000\u02c3\u02c4\u0005k\u0000"+ + "\u0000\u02c4\u02c5\u0005e\u0000\u0000\u02c5x\u0001\u0000\u0000\u0000\u02c6"+ + "\u02c7\u0005n\u0000\u0000\u02c7\u02c8\u0005o\u0000\u0000\u02c8\u02c9\u0005"+ + "t\u0000\u0000\u02c9z\u0001\u0000\u0000\u0000\u02ca\u02cb\u0005n\u0000"+ + "\u0000\u02cb\u02cc\u0005u\u0000\u0000\u02cc\u02cd\u0005l\u0000\u0000\u02cd"+ + "\u02ce\u0005l\u0000\u0000\u02ce|\u0001\u0000\u0000\u0000\u02cf\u02d0\u0005"+ + "n\u0000\u0000\u02d0\u02d1\u0005u\u0000\u0000\u02d1\u02d2\u0005l\u0000"+ + "\u0000\u02d2\u02d3\u0005l\u0000\u0000\u02d3\u02d4\u0005s\u0000\u0000\u02d4"+ + "~\u0001\u0000\u0000\u0000\u02d5\u02d6\u0005o\u0000\u0000\u02d6\u02d7\u0005"+ + 
"r\u0000\u0000\u02d7\u0080\u0001\u0000\u0000\u0000\u02d8\u02d9\u0005?\u0000"+ + "\u0000\u02d9\u0082\u0001\u0000\u0000\u0000\u02da\u02db\u0005r\u0000\u0000"+ + "\u02db\u02dc\u0005l\u0000\u0000\u02dc\u02dd\u0005i\u0000\u0000\u02dd\u02de"+ + "\u0005k\u0000\u0000\u02de\u02df\u0005e\u0000\u0000\u02df\u0084\u0001\u0000"+ + "\u0000\u0000\u02e0\u02e1\u0005)\u0000\u0000\u02e1\u0086\u0001\u0000\u0000"+ + "\u0000\u02e2\u02e3\u0005t\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000\u02e4"+ + "\u02e5\u0005u\u0000\u0000\u02e5\u02e6\u0005e\u0000\u0000\u02e6\u0088\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e8\u0005=\u0000\u0000\u02e8\u02e9\u0005=\u0000"+ + "\u0000\u02e9\u008a\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005=\u0000\u0000"+ + "\u02eb\u02ec\u0005~\u0000\u0000\u02ec\u008c\u0001\u0000\u0000\u0000\u02ed"+ + "\u02ee\u0005!\u0000\u0000\u02ee\u02ef\u0005=\u0000\u0000\u02ef\u008e\u0001"+ + "\u0000\u0000\u0000\u02f0\u02f1\u0005<\u0000\u0000\u02f1\u0090\u0001\u0000"+ + "\u0000\u0000\u02f2\u02f3\u0005<\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000"+ + "\u02f4\u0092\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005>\u0000\u0000\u02f6"+ + "\u0094\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005>\u0000\u0000\u02f8\u02f9"+ + "\u0005=\u0000\u0000\u02f9\u0096\u0001\u0000\u0000\u0000\u02fa\u02fb\u0005"+ + "+\u0000\u0000\u02fb\u0098\u0001\u0000\u0000\u0000\u02fc\u02fd\u0005-\u0000"+ + "\u0000\u02fd\u009a\u0001\u0000\u0000\u0000\u02fe\u02ff\u0005*\u0000\u0000"+ + "\u02ff\u009c\u0001\u0000\u0000\u0000\u0300\u0301\u0005/\u0000\u0000\u0301"+ + "\u009e\u0001\u0000\u0000\u0000\u0302\u0303\u0005%\u0000\u0000\u0303\u00a0"+ + "\u0001\u0000\u0000\u0000\u0304\u0305\u0005[\u0000\u0000\u0305\u0306\u0001"+ + "\u0000\u0000\u0000\u0306\u0307\u0006K\u0000\u0000\u0307\u0308\u0006K\u0000"+ + "\u0000\u0308\u00a2\u0001\u0000\u0000\u0000\u0309\u030a\u0005]\u0000\u0000"+ + "\u030a\u030b\u0001\u0000\u0000\u0000\u030b\u030c\u0006L\f\u0000\u030c"+ + "\u030d\u0006L\f\u0000\u030d\u00a4\u0001\u0000\u0000\u0000\u030e\u0312"+ + 
"\u0003E\u001d\u0000\u030f\u0311\u0003U%\u0000\u0310\u030f\u0001\u0000"+ + "\u0000\u0000\u0311\u0314\u0001\u0000\u0000\u0000\u0312\u0310\u0001\u0000"+ + "\u0000\u0000\u0312\u0313\u0001\u0000\u0000\u0000\u0313\u031f\u0001\u0000"+ + "\u0000\u0000\u0314\u0312\u0001\u0000\u0000\u0000\u0315\u0318\u0003S$\u0000"+ + "\u0316\u0318\u0003M!\u0000\u0317\u0315\u0001\u0000\u0000\u0000\u0317\u0316"+ + "\u0001\u0000\u0000\u0000\u0318\u031a\u0001\u0000\u0000\u0000\u0319\u031b"+ + "\u0003U%\u0000\u031a\u0319\u0001\u0000\u0000\u0000\u031b\u031c\u0001\u0000"+ + "\u0000\u0000\u031c\u031a\u0001\u0000\u0000\u0000\u031c\u031d\u0001\u0000"+ + "\u0000\u0000\u031d\u031f\u0001\u0000\u0000\u0000\u031e\u030e\u0001\u0000"+ + "\u0000\u0000\u031e\u0317\u0001\u0000\u0000\u0000\u031f\u00a6\u0001\u0000"+ + "\u0000\u0000\u0320\u0322\u0003O\"\u0000\u0321\u0323\u0003Q#\u0000\u0322"+ + "\u0321\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324"+ + "\u0322\u0001\u0000\u0000\u0000\u0324\u0325\u0001\u0000\u0000\u0000\u0325"+ + "\u0326\u0001\u0000\u0000\u0000\u0326\u0327\u0003O\"\u0000\u0327\u00a8"+ + "\u0001\u0000\u0000\u0000\u0328\u0329\u00031\u0013\u0000\u0329\u032a\u0001"+ + "\u0000\u0000\u0000\u032a\u032b\u0006O\b\u0000\u032b\u00aa\u0001\u0000"+ + "\u0000\u0000\u032c\u032d\u00033\u0014\u0000\u032d\u032e\u0001\u0000\u0000"+ + "\u0000\u032e\u032f\u0006P\b\u0000\u032f\u00ac\u0001\u0000\u0000\u0000"+ + "\u0330\u0331\u00035\u0015\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332"+ + "\u0333\u0006Q\b\u0000\u0333\u00ae\u0001\u0000\u0000\u0000\u0334\u0335"+ + "\u0003A\u001b\u0000\u0335\u0336\u0001\u0000\u0000\u0000\u0336\u0337\u0006"+ + "R\u000b\u0000\u0337\u0338\u0006R\f\u0000\u0338\u00b0\u0001\u0000\u0000"+ + "\u0000\u0339\u033a\u0003\u00a1K\u0000\u033a\u033b\u0001\u0000\u0000\u0000"+ + "\u033b\u033c\u0006S\t\u0000\u033c\u00b2\u0001\u0000\u0000\u0000\u033d"+ + "\u033e\u0003\u00a3L\u0000\u033e\u033f\u0001\u0000\u0000\u0000\u033f\u0340"+ + 
"\u0006T\r\u0000\u0340\u00b4\u0001\u0000\u0000\u0000\u0341\u0342\u0003"+ + "e-\u0000\u0342\u0343\u0001\u0000\u0000\u0000\u0343\u0344\u0006U\u000e"+ + "\u0000\u0344\u00b6\u0001\u0000\u0000\u0000\u0345\u0346\u0003c,\u0000\u0346"+ + "\u0347\u0001\u0000\u0000\u0000\u0347\u0348\u0006V\u000f\u0000\u0348\u00b8"+ + "\u0001\u0000\u0000\u0000\u0349\u034a\u0005m\u0000\u0000\u034a\u034b\u0005"+ + "e\u0000\u0000\u034b\u034c\u0005t\u0000\u0000\u034c\u034d\u0005a\u0000"+ + "\u0000\u034d\u034e\u0005d\u0000\u0000\u034e\u034f\u0005a\u0000\u0000\u034f"+ + "\u0350\u0005t\u0000\u0000\u0350\u0351\u0005a\u0000\u0000\u0351\u00ba\u0001"+ + "\u0000\u0000\u0000\u0352\u0356\b\n\u0000\u0000\u0353\u0354\u0005/\u0000"+ + "\u0000\u0354\u0356\b\u000b\u0000\u0000\u0355\u0352\u0001\u0000\u0000\u0000"+ + "\u0355\u0353\u0001\u0000\u0000\u0000\u0356\u00bc\u0001\u0000\u0000\u0000"+ + "\u0357\u0359\u0003\u00bbX\u0000\u0358\u0357\u0001\u0000\u0000\u0000\u0359"+ + "\u035a\u0001\u0000\u0000\u0000\u035a\u0358\u0001\u0000\u0000\u0000\u035a"+ + "\u035b\u0001\u0000\u0000\u0000\u035b\u00be\u0001\u0000\u0000\u0000\u035c"+ + "\u035d\u0003\u00a7N\u0000\u035d\u035e\u0001\u0000\u0000\u0000\u035e\u035f"+ + "\u0006Z\u0010\u0000\u035f\u00c0\u0001\u0000\u0000\u0000\u0360\u0361\u0003"+ + "1\u0013\u0000\u0361\u0362\u0001\u0000\u0000\u0000\u0362\u0363\u0006[\b"+ + "\u0000\u0363\u00c2\u0001\u0000\u0000\u0000\u0364\u0365\u00033\u0014\u0000"+ + "\u0365\u0366\u0001\u0000\u0000\u0000\u0366\u0367\u0006\\\b\u0000\u0367"+ + "\u00c4\u0001\u0000\u0000\u0000\u0368\u0369\u00035\u0015\u0000\u0369\u036a"+ + "\u0001\u0000\u0000\u0000\u036a\u036b\u0006]\b\u0000\u036b\u00c6\u0001"+ + "\u0000\u0000\u0000\u036c\u036d\u0003A\u001b\u0000\u036d\u036e\u0001\u0000"+ + "\u0000\u0000\u036e\u036f\u0006^\u000b\u0000\u036f\u0370\u0006^\f\u0000"+ + "\u0370\u00c8\u0001\u0000\u0000\u0000\u0371\u0372\u0003i/\u0000\u0372\u0373"+ + "\u0001\u0000\u0000\u0000\u0373\u0374\u0006_\u0011\u0000\u0374\u00ca\u0001"+ + 
"\u0000\u0000\u0000\u0375\u0376\u0003e-\u0000\u0376\u0377\u0001\u0000\u0000"+ + "\u0000\u0377\u0378\u0006`\u000e\u0000\u0378\u00cc\u0001\u0000\u0000\u0000"+ + "\u0379\u037e\u0003E\u001d\u0000\u037a\u037e\u0003C\u001c\u0000\u037b\u037e"+ + "\u0003S$\u0000\u037c\u037e\u0003\u009bH\u0000\u037d\u0379\u0001\u0000"+ + "\u0000\u0000\u037d\u037a\u0001\u0000\u0000\u0000\u037d\u037b\u0001\u0000"+ + "\u0000\u0000\u037d\u037c\u0001\u0000\u0000\u0000\u037e\u00ce\u0001\u0000"+ + "\u0000\u0000\u037f\u0382\u0003E\u001d\u0000\u0380\u0382\u0003\u009bH\u0000"+ + "\u0381\u037f\u0001\u0000\u0000\u0000\u0381\u0380\u0001\u0000\u0000\u0000"+ + "\u0382\u0386\u0001\u0000\u0000\u0000\u0383\u0385\u0003\u00cda\u0000\u0384"+ + "\u0383\u0001\u0000\u0000\u0000\u0385\u0388\u0001\u0000\u0000\u0000\u0386"+ + "\u0384\u0001\u0000\u0000\u0000\u0386\u0387\u0001\u0000\u0000\u0000\u0387"+ + "\u0393\u0001\u0000\u0000\u0000\u0388\u0386\u0001\u0000\u0000\u0000\u0389"+ + "\u038c\u0003S$\u0000\u038a\u038c\u0003M!\u0000\u038b\u0389\u0001\u0000"+ + "\u0000\u0000\u038b\u038a\u0001\u0000\u0000\u0000\u038c\u038e\u0001\u0000"+ + "\u0000\u0000\u038d\u038f\u0003\u00cda\u0000\u038e\u038d\u0001\u0000\u0000"+ + "\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u038e\u0001\u0000\u0000"+ + "\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u0393\u0001\u0000\u0000"+ + "\u0000\u0392\u0381\u0001\u0000\u0000\u0000\u0392\u038b\u0001\u0000\u0000"+ + "\u0000\u0393\u00d0\u0001\u0000\u0000\u0000\u0394\u0395\u0003\u00cfb\u0000"+ + "\u0395\u0396\u0001\u0000\u0000\u0000\u0396\u0397\u0006c\u0012\u0000\u0397"+ + "\u00d2\u0001\u0000\u0000\u0000\u0398\u0399\u0003\u00a7N\u0000\u0399\u039a"+ + "\u0001\u0000\u0000\u0000\u039a\u039b\u0006d\u0010\u0000\u039b\u00d4\u0001"+ + "\u0000\u0000\u0000\u039c\u039d\u00031\u0013\u0000\u039d\u039e\u0001\u0000"+ + "\u0000\u0000\u039e\u039f\u0006e\b\u0000\u039f\u00d6\u0001\u0000\u0000"+ + "\u0000\u03a0\u03a1\u00033\u0014\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000"+ + 
"\u03a2\u03a3\u0006f\b\u0000\u03a3\u00d8\u0001\u0000\u0000\u0000\u03a4"+ + "\u03a5\u00035\u0015\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7"+ + "\u0006g\b\u0000\u03a7\u00da\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003"+ + "A\u001b\u0000\u03a9\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006h\u000b"+ + "\u0000\u03ab\u03ac\u0006h\f\u0000\u03ac\u00dc\u0001\u0000\u0000\u0000"+ + "\u03ad\u03ae\u0003c,\u0000\u03ae\u03af\u0001\u0000\u0000\u0000\u03af\u03b0"+ + "\u0006i\u000f\u0000\u03b0\u00de\u0001\u0000\u0000\u0000\u03b1\u03b2\u0003"+ + "e-\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006j\u000e"+ + "\u0000\u03b4\u00e0\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003i/\u0000\u03b6"+ + "\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006k\u0011\u0000\u03b8\u00e2"+ + "\u0001\u0000\u0000\u0000\u03b9\u03ba\u0005a\u0000\u0000\u03ba\u03bb\u0005"+ + "s\u0000\u0000\u03bb\u00e4\u0001\u0000\u0000\u0000\u03bc\u03bd\u0003\u00a7"+ + "N\u0000\u03bd\u03be\u0001\u0000\u0000\u0000\u03be\u03bf\u0006m\u0010\u0000"+ + "\u03bf\u00e6\u0001\u0000\u0000\u0000\u03c0\u03c1\u0003\u00cfb\u0000\u03c1"+ + "\u03c2\u0001\u0000\u0000\u0000\u03c2\u03c3\u0006n\u0012\u0000\u03c3\u00e8"+ + "\u0001\u0000\u0000\u0000\u03c4\u03c5\u00031\u0013\u0000\u03c5\u03c6\u0001"+ + "\u0000\u0000\u0000\u03c6\u03c7\u0006o\b\u0000\u03c7\u00ea\u0001\u0000"+ + "\u0000\u0000\u03c8\u03c9\u00033\u0014\u0000\u03c9\u03ca\u0001\u0000\u0000"+ + "\u0000\u03ca\u03cb\u0006p\b\u0000\u03cb\u00ec\u0001\u0000\u0000\u0000"+ + "\u03cc\u03cd\u00035\u0015\u0000\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce"+ + "\u03cf\u0006q\b\u0000\u03cf\u00ee\u0001\u0000\u0000\u0000\u03d0\u03d1"+ + "\u0003A\u001b\u0000\u03d1\u03d2\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006"+ + "r\u000b\u0000\u03d3\u03d4\u0006r\f\u0000\u03d4\u00f0\u0001\u0000\u0000"+ + "\u0000\u03d5\u03d6\u0003\u00a1K\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000"+ + "\u03d7\u03d8\u0006s\t\u0000\u03d8\u03d9\u0006s\u0013\u0000\u03d9\u00f2"+ + 
"\u0001\u0000\u0000\u0000\u03da\u03db\u0005o\u0000\u0000\u03db\u03dc\u0005"+ + "n\u0000\u0000\u03dc\u03dd\u0001\u0000\u0000\u0000\u03dd\u03de\u0006t\u0014"+ + "\u0000\u03de\u00f4\u0001\u0000\u0000\u0000\u03df\u03e0\u0005w\u0000\u0000"+ + "\u03e0\u03e1\u0005i\u0000\u0000\u03e1\u03e2\u0005t\u0000\u0000\u03e2\u03e3"+ + "\u0005h\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e5\u0006"+ + "u\u0014\u0000\u03e5\u00f6\u0001\u0000\u0000\u0000\u03e6\u03e7\b\f\u0000"+ + "\u0000\u03e7\u00f8\u0001\u0000\u0000\u0000\u03e8\u03eb\u0003E\u001d\u0000"+ + "\u03e9\u03eb\u0003C\u001c\u0000\u03ea\u03e8\u0001\u0000\u0000\u0000\u03ea"+ + "\u03e9\u0001\u0000\u0000\u0000\u03eb\u03ef\u0001\u0000\u0000\u0000\u03ec"+ + "\u03ee\u0003\u00f7v\u0000\u03ed\u03ec\u0001\u0000\u0000\u0000\u03ee\u03f1"+ + "\u0001\u0000\u0000\u0000\u03ef\u03ed\u0001\u0000\u0000\u0000\u03ef\u03f0"+ + "\u0001\u0000\u0000\u0000\u03f0\u00fa\u0001\u0000\u0000\u0000\u03f1\u03ef"+ + "\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003\u00a7N\u0000\u03f3\u03f4\u0001"+ + "\u0000\u0000\u0000\u03f4\u03f5\u0006x\u0010\u0000\u03f5\u00fc\u0001\u0000"+ + "\u0000\u0000\u03f6\u03f7\u0003\u00f9w\u0000\u03f7\u03f8\u0001\u0000\u0000"+ + "\u0000\u03f8\u03f9\u0006y\u0015\u0000\u03f9\u00fe\u0001\u0000\u0000\u0000"+ + "\u03fa\u03fb\u00031\u0013\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc"+ + "\u03fd\u0006z\b\u0000\u03fd\u0100\u0001\u0000\u0000\u0000\u03fe\u03ff"+ + "\u00033\u0014\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006"+ + "{\b\u0000\u0401\u0102\u0001\u0000\u0000\u0000\u0402\u0403\u00035\u0015"+ + "\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006|\b\u0000"+ + "\u0405\u0104\u0001\u0000\u0000\u0000\u0406\u0407\u0003A\u001b\u0000\u0407"+ + "\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006}\u000b\u0000\u0409\u040a"+ + "\u0006}\f\u0000\u040a\u040b\u0006}\f\u0000\u040b\u0106\u0001\u0000\u0000"+ + "\u0000\u040c\u040d\u0003c,\u0000\u040d\u040e\u0001\u0000\u0000\u0000\u040e"+ + 
"\u040f\u0006~\u000f\u0000\u040f\u0108\u0001\u0000\u0000\u0000\u0410\u0411"+ + "\u0003e-\u0000\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413\u0006\u007f"+ + "\u000e\u0000\u0413\u010a\u0001\u0000\u0000\u0000\u0414\u0415\u0003i/\u0000"+ + "\u0415\u0416\u0001\u0000\u0000\u0000\u0416\u0417\u0006\u0080\u0011\u0000"+ + "\u0417\u010c\u0001\u0000\u0000\u0000\u0418\u0419\u0003\u00f5u\u0000\u0419"+ + "\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006\u0081\u0016\u0000\u041b"+ + "\u010e\u0001\u0000\u0000\u0000\u041c\u041d\u0003\u00cfb\u0000\u041d\u041e"+ + "\u0001\u0000\u0000\u0000\u041e\u041f\u0006\u0082\u0012\u0000\u041f\u0110"+ + "\u0001\u0000\u0000\u0000\u0420\u0421\u0003\u00a7N\u0000\u0421\u0422\u0001"+ + "\u0000\u0000\u0000\u0422\u0423\u0006\u0083\u0010\u0000\u0423\u0112\u0001"+ + "\u0000\u0000\u0000\u0424\u0425\u00031\u0013\u0000\u0425\u0426\u0001\u0000"+ + "\u0000\u0000\u0426\u0427\u0006\u0084\b\u0000\u0427\u0114\u0001\u0000\u0000"+ + "\u0000\u0428\u0429\u00033\u0014\u0000\u0429\u042a\u0001\u0000\u0000\u0000"+ + "\u042a\u042b\u0006\u0085\b\u0000\u042b\u0116\u0001\u0000\u0000\u0000\u042c"+ + "\u042d\u00035\u0015\u0000\u042d\u042e\u0001\u0000\u0000\u0000\u042e\u042f"+ + "\u0006\u0086\b\u0000\u042f\u0118\u0001\u0000\u0000\u0000\u0430\u0431\u0003"+ + "A\u001b\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0433\u0006\u0087"+ + "\u000b\u0000\u0433\u0434\u0006\u0087\f\u0000\u0434\u011a\u0001\u0000\u0000"+ + "\u0000\u0435\u0436\u0003i/\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437"+ + "\u0438\u0006\u0088\u0011\u0000\u0438\u011c\u0001\u0000\u0000\u0000\u0439"+ + "\u043a\u0003\u00a7N\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b\u043c"+ + "\u0006\u0089\u0010\u0000\u043c\u011e\u0001\u0000\u0000\u0000\u043d\u043e"+ + "\u0003\u00a5M\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006"+ + "\u008a\u0017\u0000\u0440\u0120\u0001\u0000\u0000\u0000\u0441\u0442\u0003"+ + "1\u0013\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006\u008b"+ + 
"\b\u0000\u0444\u0122\u0001\u0000\u0000\u0000\u0445\u0446\u00033\u0014"+ + "\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006\u008c\b\u0000"+ + "\u0448\u0124\u0001\u0000\u0000\u0000\u0449\u044a\u00035\u0015\u0000\u044a"+ + "\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u008d\b\u0000\u044c\u0126"+ + "\u0001\u0000\u0000\u0000\u044d\u044e\u0003A\u001b\u0000\u044e\u044f\u0001"+ + "\u0000\u0000\u0000\u044f\u0450\u0006\u008e\u000b\u0000\u0450\u0451\u0006"+ + "\u008e\f\u0000\u0451\u0128\u0001\u0000\u0000\u0000\u0452\u0453\u0005i"+ + "\u0000\u0000\u0453\u0454\u0005n\u0000\u0000\u0454\u0455\u0005f\u0000\u0000"+ + "\u0455\u0456\u0005o\u0000\u0000\u0456\u012a\u0001\u0000\u0000\u0000\u0457"+ + "\u0458\u0005f\u0000\u0000\u0458\u0459\u0005u\u0000\u0000\u0459\u045a\u0005"+ + "n\u0000\u0000\u045a\u045b\u0005c\u0000\u0000\u045b\u045c\u0005t\u0000"+ + "\u0000\u045c\u045d\u0005i\u0000\u0000\u045d\u045e\u0005o\u0000\u0000\u045e"+ + "\u045f\u0005n\u0000\u0000\u045f\u0460\u0005s\u0000\u0000\u0460\u012c\u0001"+ + "\u0000\u0000\u0000\u0461\u0462\u00031\u0013\u0000\u0462\u0463\u0001\u0000"+ + "\u0000\u0000\u0463\u0464\u0006\u0091\b\u0000\u0464\u012e\u0001\u0000\u0000"+ + "\u0000\u0465\u0466\u00033\u0014\u0000\u0466\u0467\u0001\u0000\u0000\u0000"+ + "\u0467\u0468\u0006\u0092\b\u0000\u0468\u0130\u0001\u0000\u0000\u0000\u0469"+ + "\u046a\u00035\u0015\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b\u046c"+ + "\u0006\u0093\b\u0000\u046c\u0132\u0001\u0000\u0000\u0000\u046d\u046e\u0003"+ + "\u00a3L\u0000\u046e\u046f\u0001\u0000\u0000\u0000\u046f\u0470\u0006\u0094"+ + "\r\u0000\u0470\u0471\u0006\u0094\f\u0000\u0471\u0134\u0001\u0000\u0000"+ + "\u0000\u0472\u0473\u0005:\u0000\u0000\u0473\u0136\u0001\u0000\u0000\u0000"+ + "\u0474\u047a\u0003M!\u0000\u0475\u047a\u0003C\u001c\u0000\u0476\u047a"+ + "\u0003i/\u0000\u0477\u047a\u0003E\u001d\u0000\u0478\u047a\u0003S$\u0000"+ + "\u0479\u0474\u0001\u0000\u0000\u0000\u0479\u0475\u0001\u0000\u0000\u0000"+ + 
"\u0479\u0476\u0001\u0000\u0000\u0000\u0479\u0477\u0001\u0000\u0000\u0000"+ + "\u0479\u0478\u0001\u0000\u0000\u0000\u047a\u047b\u0001\u0000\u0000\u0000"+ + "\u047b\u0479\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000"+ + "\u047c\u0138\u0001\u0000\u0000\u0000\u047d\u047e\u00031\u0013\u0000\u047e"+ + "\u047f\u0001\u0000\u0000\u0000\u047f\u0480\u0006\u0097\b\u0000\u0480\u013a"+ + "\u0001\u0000\u0000\u0000\u0481\u0482\u00033\u0014\u0000\u0482\u0483\u0001"+ + "\u0000\u0000\u0000\u0483\u0484\u0006\u0098\b\u0000\u0484\u013c\u0001\u0000"+ + "\u0000\u0000\u0485\u0486\u00035\u0015\u0000\u0486\u0487\u0001\u0000\u0000"+ + "\u0000\u0487\u0488\u0006\u0099\b\u0000\u0488\u013e\u0001\u0000\u0000\u0000"+ + "6\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u01db\u01e5\u01e9"+ + "\u01ec\u01f5\u01f7\u0202\u022b\u0230\u0239\u0240\u0245\u0247\u0252\u025a"+ + "\u025d\u025f\u0264\u0269\u026f\u0276\u027b\u0281\u0284\u028c\u0290\u0312"+ + "\u0317\u031c\u031e\u0324\u0355\u035a\u037d\u0381\u0386\u038b\u0390\u0392"+ + "\u03ea\u03ef\u0479\u047b\u0018\u0005\u0002\u0000\u0005\u0004\u0000\u0005"+ + "\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005"+ + "\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007@\u0000\u0005\u0000\u0000"+ + "\u0007\u001a\u0000\u0004\u0000\u0000\u0007A\u0000\u0007\"\u0000\u0007"+ + "!\u0000\u0007C\u0000\u0007$\u0000\u0007L\u0000\u0005\n\u0000\u0005\u0007"+ + "\u0000\u0007V\u0000\u0007U\u0000\u0007B\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 3acc73b1b592c..424662cd9626f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ 
-52,6 +52,7 @@ null ')' 'true' '==' +'=~' '!=' '<' '<=' @@ -93,11 +94,17 @@ null null null null +null 'info' 'functions' null null null +':' +null +null +null +null token symbolic names: null @@ -153,6 +160,7 @@ RLIKE RP TRUE EQ +CIEQ NEQ LT LTE @@ -175,7 +183,7 @@ FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS -PROJECT_UNQUOTED_IDENTIFIER +UNQUOTED_ID_PATTERN PROJECT_LINE_COMMENT PROJECT_MULTILINE_COMMENT PROJECT_WS @@ -185,6 +193,7 @@ RENAME_MULTILINE_COMMENT RENAME_WS ON WITH +ENRICH_POLICY_NAME ENRICH_LINE_COMMENT ENRICH_MULTILINE_COMMENT ENRICH_WS @@ -199,6 +208,11 @@ FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +COLON +SETTING +SETTING_LINE_COMMENT +SETTTING_MULTILINE_COMMENT +SETTING_WS rule names: singleStatement @@ -220,7 +234,6 @@ metadata evalCommand statsCommand inlinestatsCommand -grouping fromIdentifier qualifiedName qualifiedNamePattern @@ -250,7 +263,8 @@ subqueryExpression showCommand enrichCommand enrichWithClause +setting atn: -[4, 1, 98, 519, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 108, 8, 1, 10, 1, 12, 1, 111, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 117, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 132, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 144, 8, 5, 1, 5, 
1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 151, 8, 5, 10, 5, 12, 5, 154, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 173, 8, 5, 10, 5, 12, 5, 176, 9, 5, 1, 6, 1, 6, 3, 6, 180, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 187, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 199, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 205, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 213, 8, 8, 10, 8, 12, 8, 216, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 225, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 233, 8, 10, 10, 10, 12, 10, 236, 9, 10, 3, 10, 238, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 248, 8, 12, 10, 12, 12, 12, 251, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 258, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 264, 8, 14, 10, 14, 12, 14, 267, 9, 14, 1, 14, 3, 14, 270, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 277, 8, 15, 10, 15, 12, 15, 280, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 17, 1, 17, 3, 17, 293, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 299, 8, 18, 1, 19, 1, 19, 1, 19, 5, 19, 304, 8, 19, 10, 19, 12, 19, 307, 9, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 5, 21, 314, 8, 21, 10, 21, 12, 21, 317, 9, 21, 1, 22, 1, 22, 1, 22, 5, 22, 322, 8, 22, 10, 22, 12, 22, 325, 9, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 344, 8, 25, 10, 25, 12, 25, 347, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 355, 8, 25, 10, 25, 12, 25, 358, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 366, 8, 25, 10, 25, 12, 25, 369, 9, 25, 1, 25, 1, 25, 3, 25, 373, 8, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 3, 28, 389, 8, 28, 1, 28, 1, 28, 3, 28, 393, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 399, 8, 29, 10, 29, 12, 29, 402, 9, 29, 1, 29, 1, 29, 
1, 29, 1, 29, 5, 29, 408, 8, 29, 10, 29, 12, 29, 411, 9, 29, 3, 29, 413, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 419, 8, 30, 10, 30, 12, 30, 422, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 428, 8, 31, 10, 31, 12, 31, 431, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 441, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 5, 36, 453, 8, 36, 10, 36, 12, 36, 456, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 3, 39, 466, 8, 39, 1, 40, 3, 40, 469, 8, 40, 1, 40, 1, 40, 1, 41, 3, 41, 474, 8, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 493, 8, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 499, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 505, 8, 47, 10, 47, 12, 47, 508, 9, 47, 3, 47, 510, 8, 47, 1, 48, 1, 48, 1, 48, 3, 48, 515, 8, 48, 1, 48, 1, 48, 1, 48, 0, 3, 2, 10, 16, 49, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 0, 9, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 66, 66, 71, 71, 1, 0, 65, 66, 2, 0, 66, 66, 75, 75, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 52, 57, 548, 0, 98, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 4, 116, 1, 0, 0, 0, 6, 131, 1, 0, 0, 0, 8, 133, 1, 0, 0, 0, 10, 164, 1, 0, 0, 0, 12, 191, 1, 0, 0, 0, 14, 198, 1, 0, 0, 0, 16, 204, 1, 0, 0, 0, 18, 224, 1, 0, 0, 0, 20, 226, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 244, 1, 0, 0, 0, 26, 257, 1, 0, 0, 0, 28, 259, 1, 0, 0, 0, 30, 271, 1, 0, 0, 0, 32, 283, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 294, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 308, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 328, 1, 0, 0, 0, 50, 372, 1, 0, 0, 0, 52, 374, 1, 0, 0, 0, 54, 377, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 412, 1, 0, 0, 0, 60, 414, 1, 0, 0, 0, 62, 423, 1, 0, 0, 0, 64, 432, 1, 0, 0, 0, 66, 436, 1, 0, 0, 0, 68, 
442, 1, 0, 0, 0, 70, 446, 1, 0, 0, 0, 72, 449, 1, 0, 0, 0, 74, 457, 1, 0, 0, 0, 76, 461, 1, 0, 0, 0, 78, 465, 1, 0, 0, 0, 80, 468, 1, 0, 0, 0, 82, 473, 1, 0, 0, 0, 84, 477, 1, 0, 0, 0, 86, 479, 1, 0, 0, 0, 88, 481, 1, 0, 0, 0, 90, 484, 1, 0, 0, 0, 92, 492, 1, 0, 0, 0, 94, 494, 1, 0, 0, 0, 96, 514, 1, 0, 0, 0, 98, 99, 3, 2, 1, 0, 99, 100, 5, 0, 0, 1, 100, 1, 1, 0, 0, 0, 101, 102, 6, 1, -1, 0, 102, 103, 3, 4, 2, 0, 103, 109, 1, 0, 0, 0, 104, 105, 10, 1, 0, 0, 105, 106, 5, 26, 0, 0, 106, 108, 3, 6, 3, 0, 107, 104, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 3, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 117, 3, 88, 44, 0, 113, 117, 3, 28, 14, 0, 114, 117, 3, 22, 11, 0, 115, 117, 3, 92, 46, 0, 116, 112, 1, 0, 0, 0, 116, 113, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 115, 1, 0, 0, 0, 117, 5, 1, 0, 0, 0, 118, 132, 3, 32, 16, 0, 119, 132, 3, 36, 18, 0, 120, 132, 3, 52, 26, 0, 121, 132, 3, 58, 29, 0, 122, 132, 3, 54, 27, 0, 123, 132, 3, 34, 17, 0, 124, 132, 3, 8, 4, 0, 125, 132, 3, 60, 30, 0, 126, 132, 3, 62, 31, 0, 127, 132, 3, 66, 33, 0, 128, 132, 3, 68, 34, 0, 129, 132, 3, 94, 47, 0, 130, 132, 3, 70, 35, 0, 131, 118, 1, 0, 0, 0, 131, 119, 1, 0, 0, 0, 131, 120, 1, 0, 0, 0, 131, 121, 1, 0, 0, 0, 131, 122, 1, 0, 0, 0, 131, 123, 1, 0, 0, 0, 131, 124, 1, 0, 0, 0, 131, 125, 1, 0, 0, 0, 131, 126, 1, 0, 0, 0, 131, 127, 1, 0, 0, 0, 131, 128, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 130, 1, 0, 0, 0, 132, 7, 1, 0, 0, 0, 133, 134, 5, 18, 0, 0, 134, 135, 3, 10, 5, 0, 135, 9, 1, 0, 0, 0, 136, 137, 6, 5, -1, 0, 137, 138, 5, 44, 0, 0, 138, 165, 3, 10, 5, 7, 139, 165, 3, 14, 7, 0, 140, 165, 3, 12, 6, 0, 141, 143, 3, 14, 7, 0, 142, 144, 5, 44, 0, 0, 143, 142, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 5, 41, 0, 0, 146, 147, 5, 40, 0, 0, 147, 152, 3, 14, 7, 0, 148, 149, 5, 34, 0, 0, 149, 151, 3, 14, 7, 0, 150, 148, 1, 0, 0, 0, 151, 154, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 155, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 
155, 156, 5, 50, 0, 0, 156, 165, 1, 0, 0, 0, 157, 158, 3, 14, 7, 0, 158, 160, 5, 42, 0, 0, 159, 161, 5, 44, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 5, 45, 0, 0, 163, 165, 1, 0, 0, 0, 164, 136, 1, 0, 0, 0, 164, 139, 1, 0, 0, 0, 164, 140, 1, 0, 0, 0, 164, 141, 1, 0, 0, 0, 164, 157, 1, 0, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 10, 4, 0, 0, 167, 168, 5, 31, 0, 0, 168, 173, 3, 10, 5, 5, 169, 170, 10, 3, 0, 0, 170, 171, 5, 47, 0, 0, 171, 173, 3, 10, 5, 4, 172, 166, 1, 0, 0, 0, 172, 169, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 11, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 177, 179, 3, 14, 7, 0, 178, 180, 5, 44, 0, 0, 179, 178, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 5, 43, 0, 0, 182, 183, 3, 84, 42, 0, 183, 192, 1, 0, 0, 0, 184, 186, 3, 14, 7, 0, 185, 187, 5, 44, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 49, 0, 0, 189, 190, 3, 84, 42, 0, 190, 192, 1, 0, 0, 0, 191, 177, 1, 0, 0, 0, 191, 184, 1, 0, 0, 0, 192, 13, 1, 0, 0, 0, 193, 199, 3, 16, 8, 0, 194, 195, 3, 16, 8, 0, 195, 196, 3, 86, 43, 0, 196, 197, 3, 16, 8, 0, 197, 199, 1, 0, 0, 0, 198, 193, 1, 0, 0, 0, 198, 194, 1, 0, 0, 0, 199, 15, 1, 0, 0, 0, 200, 201, 6, 8, -1, 0, 201, 205, 3, 18, 9, 0, 202, 203, 7, 0, 0, 0, 203, 205, 3, 16, 8, 3, 204, 200, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 214, 1, 0, 0, 0, 206, 207, 10, 2, 0, 0, 207, 208, 7, 1, 0, 0, 208, 213, 3, 16, 8, 3, 209, 210, 10, 1, 0, 0, 210, 211, 7, 0, 0, 0, 211, 213, 3, 16, 8, 2, 212, 206, 1, 0, 0, 0, 212, 209, 1, 0, 0, 0, 213, 216, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 17, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 217, 225, 3, 50, 25, 0, 218, 225, 3, 42, 21, 0, 219, 225, 3, 20, 10, 0, 220, 221, 5, 40, 0, 0, 221, 222, 3, 10, 5, 0, 222, 223, 5, 50, 0, 0, 223, 225, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 225, 19, 1, 0, 0, 0, 226, 227, 3, 46, 23, 0, 
227, 237, 5, 40, 0, 0, 228, 238, 5, 60, 0, 0, 229, 234, 3, 10, 5, 0, 230, 231, 5, 34, 0, 0, 231, 233, 3, 10, 5, 0, 232, 230, 1, 0, 0, 0, 233, 236, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 237, 228, 1, 0, 0, 0, 237, 229, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 50, 0, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 14, 0, 0, 242, 243, 3, 24, 12, 0, 243, 23, 1, 0, 0, 0, 244, 249, 3, 26, 13, 0, 245, 246, 5, 34, 0, 0, 246, 248, 3, 26, 13, 0, 247, 245, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 258, 3, 10, 5, 0, 253, 254, 3, 42, 21, 0, 254, 255, 5, 33, 0, 0, 255, 256, 3, 10, 5, 0, 256, 258, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 27, 1, 0, 0, 0, 259, 260, 5, 6, 0, 0, 260, 265, 3, 40, 20, 0, 261, 262, 5, 34, 0, 0, 262, 264, 3, 40, 20, 0, 263, 261, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 270, 3, 30, 15, 0, 269, 268, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 29, 1, 0, 0, 0, 271, 272, 5, 63, 0, 0, 272, 273, 5, 70, 0, 0, 273, 278, 3, 40, 20, 0, 274, 275, 5, 34, 0, 0, 275, 277, 3, 40, 20, 0, 276, 274, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 5, 64, 0, 0, 282, 31, 1, 0, 0, 0, 283, 284, 5, 4, 0, 0, 284, 285, 3, 24, 12, 0, 285, 33, 1, 0, 0, 0, 286, 288, 5, 17, 0, 0, 287, 289, 3, 24, 12, 0, 288, 287, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 291, 5, 30, 0, 0, 291, 293, 3, 38, 19, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 35, 1, 0, 0, 0, 294, 295, 5, 8, 0, 0, 295, 298, 3, 24, 12, 0, 296, 297, 5, 30, 0, 0, 297, 299, 3, 38, 19, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 37, 1, 0, 0, 0, 300, 305, 3, 42, 21, 0, 301, 302, 5, 34, 0, 0, 302, 304, 3, 42, 21, 0, 303, 301, 1, 0, 0, 0, 304, 307, 1, 0, 0, 
0, 305, 303, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 39, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 308, 309, 7, 2, 0, 0, 309, 41, 1, 0, 0, 0, 310, 315, 3, 46, 23, 0, 311, 312, 5, 36, 0, 0, 312, 314, 3, 46, 23, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 43, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 323, 3, 48, 24, 0, 319, 320, 5, 36, 0, 0, 320, 322, 3, 48, 24, 0, 321, 319, 1, 0, 0, 0, 322, 325, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 45, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 326, 327, 7, 3, 0, 0, 327, 47, 1, 0, 0, 0, 328, 329, 7, 4, 0, 0, 329, 49, 1, 0, 0, 0, 330, 373, 5, 45, 0, 0, 331, 332, 3, 82, 41, 0, 332, 333, 5, 65, 0, 0, 333, 373, 1, 0, 0, 0, 334, 373, 3, 80, 40, 0, 335, 373, 3, 82, 41, 0, 336, 373, 3, 76, 38, 0, 337, 373, 5, 48, 0, 0, 338, 373, 3, 84, 42, 0, 339, 340, 5, 63, 0, 0, 340, 345, 3, 78, 39, 0, 341, 342, 5, 34, 0, 0, 342, 344, 3, 78, 39, 0, 343, 341, 1, 0, 0, 0, 344, 347, 1, 0, 0, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 348, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 348, 349, 5, 64, 0, 0, 349, 373, 1, 0, 0, 0, 350, 351, 5, 63, 0, 0, 351, 356, 3, 76, 38, 0, 352, 353, 5, 34, 0, 0, 353, 355, 3, 76, 38, 0, 354, 352, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 359, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 359, 360, 5, 64, 0, 0, 360, 373, 1, 0, 0, 0, 361, 362, 5, 63, 0, 0, 362, 367, 3, 84, 42, 0, 363, 364, 5, 34, 0, 0, 364, 366, 3, 84, 42, 0, 365, 363, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 370, 371, 5, 64, 0, 0, 371, 373, 1, 0, 0, 0, 372, 330, 1, 0, 0, 0, 372, 331, 1, 0, 0, 0, 372, 334, 1, 0, 0, 0, 372, 335, 1, 0, 0, 0, 372, 336, 1, 0, 0, 0, 372, 337, 1, 0, 0, 0, 372, 338, 1, 0, 0, 0, 372, 339, 1, 0, 0, 0, 372, 350, 1, 0, 0, 0, 372, 361, 1, 0, 0, 0, 373, 51, 1, 0, 0, 0, 374, 375, 5, 10, 0, 0, 375, 376, 5, 28, 0, 0, 376, 53, 1, 0, 0, 0, 377, 378, 5, 16, 0, 0, 378, 383, 3, 56, 28, 
0, 379, 380, 5, 34, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 388, 3, 10, 5, 0, 387, 389, 7, 5, 0, 0, 388, 387, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 391, 5, 46, 0, 0, 391, 393, 7, 6, 0, 0, 392, 390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 57, 1, 0, 0, 0, 394, 395, 5, 9, 0, 0, 395, 400, 3, 44, 22, 0, 396, 397, 5, 34, 0, 0, 397, 399, 3, 44, 22, 0, 398, 396, 1, 0, 0, 0, 399, 402, 1, 0, 0, 0, 400, 398, 1, 0, 0, 0, 400, 401, 1, 0, 0, 0, 401, 413, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 403, 404, 5, 12, 0, 0, 404, 409, 3, 44, 22, 0, 405, 406, 5, 34, 0, 0, 406, 408, 3, 44, 22, 0, 407, 405, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 409, 410, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 412, 394, 1, 0, 0, 0, 412, 403, 1, 0, 0, 0, 413, 59, 1, 0, 0, 0, 414, 415, 5, 2, 0, 0, 415, 420, 3, 44, 22, 0, 416, 417, 5, 34, 0, 0, 417, 419, 3, 44, 22, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 61, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 5, 13, 0, 0, 424, 429, 3, 64, 32, 0, 425, 426, 5, 34, 0, 0, 426, 428, 3, 64, 32, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 63, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 433, 3, 44, 22, 0, 433, 434, 5, 79, 0, 0, 434, 435, 3, 44, 22, 0, 435, 65, 1, 0, 0, 0, 436, 437, 5, 1, 0, 0, 437, 438, 3, 18, 9, 0, 438, 440, 3, 84, 42, 0, 439, 441, 3, 72, 36, 0, 440, 439, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 67, 1, 0, 0, 0, 442, 443, 5, 7, 0, 0, 443, 444, 3, 18, 9, 0, 444, 445, 3, 84, 42, 0, 445, 69, 1, 0, 0, 0, 446, 447, 5, 11, 0, 0, 447, 448, 3, 42, 21, 0, 448, 71, 1, 0, 0, 0, 449, 454, 3, 74, 37, 0, 450, 451, 5, 34, 0, 0, 451, 453, 3, 74, 37, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 73, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 458, 3, 46, 23, 
0, 458, 459, 5, 33, 0, 0, 459, 460, 3, 50, 25, 0, 460, 75, 1, 0, 0, 0, 461, 462, 7, 7, 0, 0, 462, 77, 1, 0, 0, 0, 463, 466, 3, 80, 40, 0, 464, 466, 3, 82, 41, 0, 465, 463, 1, 0, 0, 0, 465, 464, 1, 0, 0, 0, 466, 79, 1, 0, 0, 0, 467, 469, 7, 0, 0, 0, 468, 467, 1, 0, 0, 0, 468, 469, 1, 0, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 5, 29, 0, 0, 471, 81, 1, 0, 0, 0, 472, 474, 7, 0, 0, 0, 473, 472, 1, 0, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 5, 28, 0, 0, 476, 83, 1, 0, 0, 0, 477, 478, 5, 27, 0, 0, 478, 85, 1, 0, 0, 0, 479, 480, 7, 8, 0, 0, 480, 87, 1, 0, 0, 0, 481, 482, 5, 5, 0, 0, 482, 483, 3, 90, 45, 0, 483, 89, 1, 0, 0, 0, 484, 485, 5, 63, 0, 0, 485, 486, 3, 2, 1, 0, 486, 487, 5, 64, 0, 0, 487, 91, 1, 0, 0, 0, 488, 489, 5, 15, 0, 0, 489, 493, 5, 94, 0, 0, 490, 491, 5, 15, 0, 0, 491, 493, 5, 95, 0, 0, 492, 488, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 493, 93, 1, 0, 0, 0, 494, 495, 5, 3, 0, 0, 495, 498, 3, 40, 20, 0, 496, 497, 5, 83, 0, 0, 497, 499, 3, 44, 22, 0, 498, 496, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 509, 1, 0, 0, 0, 500, 501, 5, 84, 0, 0, 501, 506, 3, 96, 48, 0, 502, 503, 5, 34, 0, 0, 503, 505, 3, 96, 48, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 510, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 500, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 95, 1, 0, 0, 0, 511, 512, 3, 44, 22, 0, 512, 513, 5, 33, 0, 0, 513, 515, 1, 0, 0, 0, 514, 511, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 517, 3, 44, 22, 0, 517, 97, 1, 0, 0, 0, 52, 109, 116, 131, 143, 152, 160, 164, 172, 174, 179, 186, 191, 198, 204, 212, 214, 224, 234, 237, 249, 257, 265, 269, 278, 288, 292, 298, 305, 315, 323, 345, 356, 367, 372, 383, 388, 392, 400, 409, 412, 420, 429, 440, 454, 465, 468, 473, 492, 498, 506, 509, 514] \ No newline at end of file +[4, 1, 105, 523, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 
7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 108, 8, 1, 10, 1, 12, 1, 111, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 117, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 132, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 144, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 151, 8, 5, 10, 5, 12, 5, 154, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 173, 8, 5, 10, 5, 12, 5, 176, 9, 5, 1, 6, 1, 6, 3, 6, 180, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 187, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 199, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 205, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 213, 8, 8, 10, 8, 12, 8, 216, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 225, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 233, 8, 10, 10, 10, 12, 10, 236, 9, 10, 3, 10, 238, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 248, 8, 12, 10, 12, 12, 12, 251, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 258, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 264, 8, 14, 10, 14, 12, 14, 267, 9, 14, 1, 14, 3, 14, 270, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 277, 8, 15, 10, 15, 12, 15, 280, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 17, 1, 17, 3, 17, 293, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 299, 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 
5, 20, 306, 8, 20, 10, 20, 12, 20, 309, 9, 20, 1, 21, 1, 21, 1, 21, 5, 21, 314, 8, 21, 10, 21, 12, 21, 317, 9, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 336, 8, 24, 10, 24, 12, 24, 339, 9, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 347, 8, 24, 10, 24, 12, 24, 350, 9, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 358, 8, 24, 10, 24, 12, 24, 361, 9, 24, 1, 24, 1, 24, 3, 24, 365, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 374, 8, 26, 10, 26, 12, 26, 377, 9, 26, 1, 27, 1, 27, 3, 27, 381, 8, 27, 1, 27, 1, 27, 3, 27, 385, 8, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 391, 8, 28, 10, 28, 12, 28, 394, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 400, 8, 28, 10, 28, 12, 28, 403, 9, 28, 3, 28, 405, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 411, 8, 29, 10, 29, 12, 29, 414, 9, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 420, 8, 30, 10, 30, 12, 30, 423, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 433, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 5, 35, 445, 8, 35, 10, 35, 12, 35, 448, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 38, 1, 38, 3, 38, 458, 8, 38, 1, 39, 3, 39, 461, 8, 39, 1, 39, 1, 39, 1, 40, 3, 40, 466, 8, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 485, 8, 45, 1, 46, 1, 46, 5, 46, 489, 8, 46, 10, 46, 12, 46, 492, 9, 46, 1, 46, 1, 46, 1, 46, 3, 46, 497, 8, 46, 1, 46, 1, 46, 1, 46, 1, 46, 5, 46, 503, 8, 46, 10, 46, 12, 46, 506, 9, 46, 3, 46, 508, 8, 46, 1, 47, 1, 47, 1, 47, 3, 47, 513, 8, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 0, 3, 2, 10, 16, 49, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 0, 9, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 
67, 72, 72, 1, 0, 66, 67, 2, 0, 67, 67, 76, 76, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 52, 58, 552, 0, 98, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 4, 116, 1, 0, 0, 0, 6, 131, 1, 0, 0, 0, 8, 133, 1, 0, 0, 0, 10, 164, 1, 0, 0, 0, 12, 191, 1, 0, 0, 0, 14, 198, 1, 0, 0, 0, 16, 204, 1, 0, 0, 0, 18, 224, 1, 0, 0, 0, 20, 226, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 244, 1, 0, 0, 0, 26, 257, 1, 0, 0, 0, 28, 259, 1, 0, 0, 0, 30, 271, 1, 0, 0, 0, 32, 283, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 294, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 302, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 320, 1, 0, 0, 0, 48, 364, 1, 0, 0, 0, 50, 366, 1, 0, 0, 0, 52, 369, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 404, 1, 0, 0, 0, 58, 406, 1, 0, 0, 0, 60, 415, 1, 0, 0, 0, 62, 424, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 434, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 441, 1, 0, 0, 0, 72, 449, 1, 0, 0, 0, 74, 453, 1, 0, 0, 0, 76, 457, 1, 0, 0, 0, 78, 460, 1, 0, 0, 0, 80, 465, 1, 0, 0, 0, 82, 469, 1, 0, 0, 0, 84, 471, 1, 0, 0, 0, 86, 473, 1, 0, 0, 0, 88, 476, 1, 0, 0, 0, 90, 484, 1, 0, 0, 0, 92, 486, 1, 0, 0, 0, 94, 512, 1, 0, 0, 0, 96, 516, 1, 0, 0, 0, 98, 99, 3, 2, 1, 0, 99, 100, 5, 0, 0, 1, 100, 1, 1, 0, 0, 0, 101, 102, 6, 1, -1, 0, 102, 103, 3, 4, 2, 0, 103, 109, 1, 0, 0, 0, 104, 105, 10, 1, 0, 0, 105, 106, 5, 26, 0, 0, 106, 108, 3, 6, 3, 0, 107, 104, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 3, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 117, 3, 86, 43, 0, 113, 117, 3, 28, 14, 0, 114, 117, 3, 22, 11, 0, 115, 117, 3, 90, 45, 0, 116, 112, 1, 0, 0, 0, 116, 113, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 115, 1, 0, 0, 0, 117, 5, 1, 0, 0, 0, 118, 132, 3, 32, 16, 0, 119, 132, 3, 36, 18, 0, 120, 132, 3, 50, 25, 0, 121, 132, 3, 56, 28, 0, 122, 132, 3, 52, 26, 0, 123, 132, 3, 34, 17, 0, 124, 132, 3, 8, 4, 0, 125, 132, 3, 58, 29, 0, 126, 132, 3, 60, 30, 0, 127, 132, 3, 64, 32, 0, 128, 132, 3, 66, 33, 0, 129, 132, 3, 92, 46, 0, 130, 132, 3, 68, 34, 0, 131, 118, 
1, 0, 0, 0, 131, 119, 1, 0, 0, 0, 131, 120, 1, 0, 0, 0, 131, 121, 1, 0, 0, 0, 131, 122, 1, 0, 0, 0, 131, 123, 1, 0, 0, 0, 131, 124, 1, 0, 0, 0, 131, 125, 1, 0, 0, 0, 131, 126, 1, 0, 0, 0, 131, 127, 1, 0, 0, 0, 131, 128, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 130, 1, 0, 0, 0, 132, 7, 1, 0, 0, 0, 133, 134, 5, 18, 0, 0, 134, 135, 3, 10, 5, 0, 135, 9, 1, 0, 0, 0, 136, 137, 6, 5, -1, 0, 137, 138, 5, 44, 0, 0, 138, 165, 3, 10, 5, 7, 139, 165, 3, 14, 7, 0, 140, 165, 3, 12, 6, 0, 141, 143, 3, 14, 7, 0, 142, 144, 5, 44, 0, 0, 143, 142, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 5, 41, 0, 0, 146, 147, 5, 40, 0, 0, 147, 152, 3, 14, 7, 0, 148, 149, 5, 34, 0, 0, 149, 151, 3, 14, 7, 0, 150, 148, 1, 0, 0, 0, 151, 154, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 155, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 156, 5, 50, 0, 0, 156, 165, 1, 0, 0, 0, 157, 158, 3, 14, 7, 0, 158, 160, 5, 42, 0, 0, 159, 161, 5, 44, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 5, 45, 0, 0, 163, 165, 1, 0, 0, 0, 164, 136, 1, 0, 0, 0, 164, 139, 1, 0, 0, 0, 164, 140, 1, 0, 0, 0, 164, 141, 1, 0, 0, 0, 164, 157, 1, 0, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 10, 4, 0, 0, 167, 168, 5, 31, 0, 0, 168, 173, 3, 10, 5, 5, 169, 170, 10, 3, 0, 0, 170, 171, 5, 47, 0, 0, 171, 173, 3, 10, 5, 4, 172, 166, 1, 0, 0, 0, 172, 169, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 11, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 177, 179, 3, 14, 7, 0, 178, 180, 5, 44, 0, 0, 179, 178, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 5, 43, 0, 0, 182, 183, 3, 82, 41, 0, 183, 192, 1, 0, 0, 0, 184, 186, 3, 14, 7, 0, 185, 187, 5, 44, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 49, 0, 0, 189, 190, 3, 82, 41, 0, 190, 192, 1, 0, 0, 0, 191, 177, 1, 0, 0, 0, 191, 184, 1, 0, 0, 0, 192, 13, 1, 0, 0, 0, 193, 199, 3, 16, 8, 0, 194, 195, 3, 16, 8, 0, 195, 196, 3, 84, 42, 0, 196, 197, 3, 16, 
8, 0, 197, 199, 1, 0, 0, 0, 198, 193, 1, 0, 0, 0, 198, 194, 1, 0, 0, 0, 199, 15, 1, 0, 0, 0, 200, 201, 6, 8, -1, 0, 201, 205, 3, 18, 9, 0, 202, 203, 7, 0, 0, 0, 203, 205, 3, 16, 8, 3, 204, 200, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 214, 1, 0, 0, 0, 206, 207, 10, 2, 0, 0, 207, 208, 7, 1, 0, 0, 208, 213, 3, 16, 8, 3, 209, 210, 10, 1, 0, 0, 210, 211, 7, 0, 0, 0, 211, 213, 3, 16, 8, 2, 212, 206, 1, 0, 0, 0, 212, 209, 1, 0, 0, 0, 213, 216, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 17, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 217, 225, 3, 48, 24, 0, 218, 225, 3, 40, 20, 0, 219, 225, 3, 20, 10, 0, 220, 221, 5, 40, 0, 0, 221, 222, 3, 10, 5, 0, 222, 223, 5, 50, 0, 0, 223, 225, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 225, 19, 1, 0, 0, 0, 226, 227, 3, 44, 22, 0, 227, 237, 5, 40, 0, 0, 228, 238, 5, 61, 0, 0, 229, 234, 3, 10, 5, 0, 230, 231, 5, 34, 0, 0, 231, 233, 3, 10, 5, 0, 232, 230, 1, 0, 0, 0, 233, 236, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 237, 228, 1, 0, 0, 0, 237, 229, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 50, 0, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 14, 0, 0, 242, 243, 3, 24, 12, 0, 243, 23, 1, 0, 0, 0, 244, 249, 3, 26, 13, 0, 245, 246, 5, 34, 0, 0, 246, 248, 3, 26, 13, 0, 247, 245, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 258, 3, 10, 5, 0, 253, 254, 3, 40, 20, 0, 254, 255, 5, 33, 0, 0, 255, 256, 3, 10, 5, 0, 256, 258, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 27, 1, 0, 0, 0, 259, 260, 5, 6, 0, 0, 260, 265, 3, 38, 19, 0, 261, 262, 5, 34, 0, 0, 262, 264, 3, 38, 19, 0, 263, 261, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 270, 3, 30, 15, 0, 269, 268, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 29, 1, 0, 0, 0, 271, 272, 5, 64, 0, 
0, 272, 273, 5, 71, 0, 0, 273, 278, 3, 38, 19, 0, 274, 275, 5, 34, 0, 0, 275, 277, 3, 38, 19, 0, 276, 274, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 5, 65, 0, 0, 282, 31, 1, 0, 0, 0, 283, 284, 5, 4, 0, 0, 284, 285, 3, 24, 12, 0, 285, 33, 1, 0, 0, 0, 286, 288, 5, 17, 0, 0, 287, 289, 3, 24, 12, 0, 288, 287, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 291, 5, 30, 0, 0, 291, 293, 3, 24, 12, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 35, 1, 0, 0, 0, 294, 295, 5, 8, 0, 0, 295, 298, 3, 24, 12, 0, 296, 297, 5, 30, 0, 0, 297, 299, 3, 24, 12, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 37, 1, 0, 0, 0, 300, 301, 7, 2, 0, 0, 301, 39, 1, 0, 0, 0, 302, 307, 3, 44, 22, 0, 303, 304, 5, 36, 0, 0, 304, 306, 3, 44, 22, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 41, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 315, 3, 46, 23, 0, 311, 312, 5, 36, 0, 0, 312, 314, 3, 46, 23, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 43, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 319, 7, 3, 0, 0, 319, 45, 1, 0, 0, 0, 320, 321, 7, 4, 0, 0, 321, 47, 1, 0, 0, 0, 322, 365, 5, 45, 0, 0, 323, 324, 3, 80, 40, 0, 324, 325, 5, 66, 0, 0, 325, 365, 1, 0, 0, 0, 326, 365, 3, 78, 39, 0, 327, 365, 3, 80, 40, 0, 328, 365, 3, 74, 37, 0, 329, 365, 5, 48, 0, 0, 330, 365, 3, 82, 41, 0, 331, 332, 5, 64, 0, 0, 332, 337, 3, 76, 38, 0, 333, 334, 5, 34, 0, 0, 334, 336, 3, 76, 38, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 340, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 341, 5, 65, 0, 0, 341, 365, 1, 0, 0, 0, 342, 343, 5, 64, 0, 0, 343, 348, 3, 74, 37, 0, 344, 345, 5, 34, 0, 0, 345, 347, 3, 74, 37, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 352, 5, 65, 0, 0, 352, 365, 1, 
0, 0, 0, 353, 354, 5, 64, 0, 0, 354, 359, 3, 82, 41, 0, 355, 356, 5, 34, 0, 0, 356, 358, 3, 82, 41, 0, 357, 355, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 362, 1, 0, 0, 0, 361, 359, 1, 0, 0, 0, 362, 363, 5, 65, 0, 0, 363, 365, 1, 0, 0, 0, 364, 322, 1, 0, 0, 0, 364, 323, 1, 0, 0, 0, 364, 326, 1, 0, 0, 0, 364, 327, 1, 0, 0, 0, 364, 328, 1, 0, 0, 0, 364, 329, 1, 0, 0, 0, 364, 330, 1, 0, 0, 0, 364, 331, 1, 0, 0, 0, 364, 342, 1, 0, 0, 0, 364, 353, 1, 0, 0, 0, 365, 49, 1, 0, 0, 0, 366, 367, 5, 10, 0, 0, 367, 368, 5, 28, 0, 0, 368, 51, 1, 0, 0, 0, 369, 370, 5, 16, 0, 0, 370, 375, 3, 54, 27, 0, 371, 372, 5, 34, 0, 0, 372, 374, 3, 54, 27, 0, 373, 371, 1, 0, 0, 0, 374, 377, 1, 0, 0, 0, 375, 373, 1, 0, 0, 0, 375, 376, 1, 0, 0, 0, 376, 53, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 378, 380, 3, 10, 5, 0, 379, 381, 7, 5, 0, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 383, 5, 46, 0, 0, 383, 385, 7, 6, 0, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 55, 1, 0, 0, 0, 386, 387, 5, 9, 0, 0, 387, 392, 3, 42, 21, 0, 388, 389, 5, 34, 0, 0, 389, 391, 3, 42, 21, 0, 390, 388, 1, 0, 0, 0, 391, 394, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 405, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 395, 396, 5, 12, 0, 0, 396, 401, 3, 42, 21, 0, 397, 398, 5, 34, 0, 0, 398, 400, 3, 42, 21, 0, 399, 397, 1, 0, 0, 0, 400, 403, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 404, 386, 1, 0, 0, 0, 404, 395, 1, 0, 0, 0, 405, 57, 1, 0, 0, 0, 406, 407, 5, 2, 0, 0, 407, 412, 3, 42, 21, 0, 408, 409, 5, 34, 0, 0, 409, 411, 3, 42, 21, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 59, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 5, 13, 0, 0, 416, 421, 3, 62, 31, 0, 417, 418, 5, 34, 0, 0, 418, 420, 3, 62, 31, 0, 419, 417, 1, 0, 0, 0, 420, 423, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 61, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 
424, 425, 3, 42, 21, 0, 425, 426, 5, 80, 0, 0, 426, 427, 3, 42, 21, 0, 427, 63, 1, 0, 0, 0, 428, 429, 5, 1, 0, 0, 429, 430, 3, 18, 9, 0, 430, 432, 3, 82, 41, 0, 431, 433, 3, 70, 35, 0, 432, 431, 1, 0, 0, 0, 432, 433, 1, 0, 0, 0, 433, 65, 1, 0, 0, 0, 434, 435, 5, 7, 0, 0, 435, 436, 3, 18, 9, 0, 436, 437, 3, 82, 41, 0, 437, 67, 1, 0, 0, 0, 438, 439, 5, 11, 0, 0, 439, 440, 3, 40, 20, 0, 440, 69, 1, 0, 0, 0, 441, 446, 3, 72, 36, 0, 442, 443, 5, 34, 0, 0, 443, 445, 3, 72, 36, 0, 444, 442, 1, 0, 0, 0, 445, 448, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 71, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 449, 450, 3, 44, 22, 0, 450, 451, 5, 33, 0, 0, 451, 452, 3, 48, 24, 0, 452, 73, 1, 0, 0, 0, 453, 454, 7, 7, 0, 0, 454, 75, 1, 0, 0, 0, 455, 458, 3, 78, 39, 0, 456, 458, 3, 80, 40, 0, 457, 455, 1, 0, 0, 0, 457, 456, 1, 0, 0, 0, 458, 77, 1, 0, 0, 0, 459, 461, 7, 0, 0, 0, 460, 459, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 5, 29, 0, 0, 463, 79, 1, 0, 0, 0, 464, 466, 7, 0, 0, 0, 465, 464, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 5, 28, 0, 0, 468, 81, 1, 0, 0, 0, 469, 470, 5, 27, 0, 0, 470, 83, 1, 0, 0, 0, 471, 472, 7, 8, 0, 0, 472, 85, 1, 0, 0, 0, 473, 474, 5, 5, 0, 0, 474, 475, 3, 88, 44, 0, 475, 87, 1, 0, 0, 0, 476, 477, 5, 64, 0, 0, 477, 478, 3, 2, 1, 0, 478, 479, 5, 65, 0, 0, 479, 89, 1, 0, 0, 0, 480, 481, 5, 15, 0, 0, 481, 485, 5, 96, 0, 0, 482, 483, 5, 15, 0, 0, 483, 485, 5, 97, 0, 0, 484, 480, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 91, 1, 0, 0, 0, 486, 490, 5, 3, 0, 0, 487, 489, 3, 96, 48, 0, 488, 487, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 493, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 493, 496, 5, 86, 0, 0, 494, 495, 5, 84, 0, 0, 495, 497, 3, 42, 21, 0, 496, 494, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 507, 1, 0, 0, 0, 498, 499, 5, 85, 0, 0, 499, 504, 3, 94, 47, 0, 500, 501, 5, 34, 0, 0, 501, 503, 3, 94, 47, 0, 502, 500, 1, 0, 0, 0, 503, 506, 1, 0, 0, 0, 504, 502, 1, 0, 0, 0, 
504, 505, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 507, 498, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 93, 1, 0, 0, 0, 509, 510, 3, 42, 21, 0, 510, 511, 5, 33, 0, 0, 511, 513, 1, 0, 0, 0, 512, 509, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 3, 42, 21, 0, 515, 95, 1, 0, 0, 0, 516, 517, 5, 64, 0, 0, 517, 518, 5, 102, 0, 0, 518, 519, 5, 101, 0, 0, 519, 520, 5, 102, 0, 0, 520, 521, 5, 65, 0, 0, 521, 97, 1, 0, 0, 0, 52, 109, 116, 131, 143, 152, 160, 164, 172, 174, 179, 186, 191, 198, 204, 212, 214, 224, 234, 237, 249, 257, 265, 269, 278, 288, 292, 298, 307, 315, 337, 348, 359, 364, 375, 380, 384, 392, 401, 404, 412, 421, 432, 446, 457, 460, 465, 484, 490, 496, 504, 507, 512] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 54ec466de9623..846a28cccc817 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -24,48 +24,49 @@ public class EsqlBaseParser extends Parser { PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, - RLIKE=49, RP=50, TRUE=51, EQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, - PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, - CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, - EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, - FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, PROJECT_UNQUOTED_IDENTIFIER=75, - PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, - AS=79, RENAME_LINE_COMMENT=80, 
RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, - ON=83, WITH=84, ENRICH_LINE_COMMENT=85, ENRICH_MULTILINE_COMMENT=86, ENRICH_WS=87, - ENRICH_FIELD_LINE_COMMENT=88, ENRICH_FIELD_MULTILINE_COMMENT=89, ENRICH_FIELD_WS=90, - MVEXPAND_LINE_COMMENT=91, MVEXPAND_MULTILINE_COMMENT=92, MVEXPAND_WS=93, - INFO=94, FUNCTIONS=95, SHOW_LINE_COMMENT=96, SHOW_MULTILINE_COMMENT=97, - SHOW_WS=98; + RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, + GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, + CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, + EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72, + FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, UNQUOTED_ID_PATTERN=76, + PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, + AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, + ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, + ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, + ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, + MVEXPAND_WS=95, INFO=96, FUNCTIONS=97, SHOW_LINE_COMMENT=98, SHOW_MULTILINE_COMMENT=99, + SHOW_WS=100, COLON=101, SETTING=102, SETTING_LINE_COMMENT=103, SETTTING_MULTILINE_COMMENT=104, + SETTING_WS=105; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, RULE_functionExpression = 10, RULE_rowCommand = 11, RULE_fields = 12, RULE_field = 13, RULE_fromCommand = 14, RULE_metadata = 15, RULE_evalCommand = 16, - RULE_statsCommand = 17, RULE_inlinestatsCommand = 18, RULE_grouping = 19, - RULE_fromIdentifier = 20, RULE_qualifiedName = 21, RULE_qualifiedNamePattern = 
22, - RULE_identifier = 23, RULE_identifierPattern = 24, RULE_constant = 25, - RULE_limitCommand = 26, RULE_sortCommand = 27, RULE_orderExpression = 28, - RULE_keepCommand = 29, RULE_dropCommand = 30, RULE_renameCommand = 31, - RULE_renameClause = 32, RULE_dissectCommand = 33, RULE_grokCommand = 34, - RULE_mvExpandCommand = 35, RULE_commandOptions = 36, RULE_commandOption = 37, - RULE_booleanValue = 38, RULE_numericValue = 39, RULE_decimalValue = 40, - RULE_integerValue = 41, RULE_string = 42, RULE_comparisonOperator = 43, - RULE_explainCommand = 44, RULE_subqueryExpression = 45, RULE_showCommand = 46, - RULE_enrichCommand = 47, RULE_enrichWithClause = 48; + RULE_statsCommand = 17, RULE_inlinestatsCommand = 18, RULE_fromIdentifier = 19, + RULE_qualifiedName = 20, RULE_qualifiedNamePattern = 21, RULE_identifier = 22, + RULE_identifierPattern = 23, RULE_constant = 24, RULE_limitCommand = 25, + RULE_sortCommand = 26, RULE_orderExpression = 27, RULE_keepCommand = 28, + RULE_dropCommand = 29, RULE_renameCommand = 30, RULE_renameClause = 31, + RULE_dissectCommand = 32, RULE_grokCommand = 33, RULE_mvExpandCommand = 34, + RULE_commandOptions = 35, RULE_commandOption = 36, RULE_booleanValue = 37, + RULE_numericValue = 38, RULE_decimalValue = 39, RULE_integerValue = 40, + RULE_string = 41, RULE_comparisonOperator = 42, RULE_explainCommand = 43, + RULE_subqueryExpression = 44, RULE_showCommand = 45, RULE_enrichCommand = 46, + RULE_enrichWithClause = 47, RULE_setting = 48; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "rowCommand", "fields", "field", "fromCommand", "metadata", "evalCommand", "statsCommand", "inlinestatsCommand", - "grouping", "fromIdentifier", "qualifiedName", "qualifiedNamePattern", - "identifier", "identifierPattern", "constant", 
"limitCommand", "sortCommand", - "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", - "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", - "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", - "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause" + "fromIdentifier", "qualifiedName", "qualifiedNamePattern", "identifier", + "identifierPattern", "constant", "limitCommand", "sortCommand", "orderExpression", + "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", + "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", + "booleanValue", "numericValue", "decimalValue", "integerValue", "string", + "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", + "enrichCommand", "enrichWithClause", "setting" }; } public static final String[] ruleNames = makeRuleNames(); @@ -78,11 +79,12 @@ private static String[] makeLiteralNames() { null, null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'!='", "'<'", "'<='", - "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, - null, null, null, null, "'metadata'", null, null, null, null, null, null, - null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, - null, null, null, null, null, null, "'info'", "'functions'" + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", "'functions'", + 
null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -95,17 +97,18 @@ private static String[] makeSymbolicNames() { "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", - "TRUE", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", - "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", - "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS" + "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", + "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "UNQUOTED_ID_PATTERN", "PROJECT_LINE_COMMENT", + "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", + "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", + "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", 
"FUNCTIONS", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -1342,7 +1345,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE setState(207); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0) ) { + if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -2074,13 +2077,15 @@ public final EvalCommandContext evalCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class StatsCommandContext extends ParserRuleContext { + public FieldsContext stats; + public FieldsContext grouping; public TerminalNode STATS() { return getToken(EsqlBaseParser.STATS, 0); } - public FieldsContext fields() { - return getRuleContext(FieldsContext.class,0); - } public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } - public GroupingContext grouping() { - return getRuleContext(GroupingContext.class,0); + public List fields() { + return getRuleContexts(FieldsContext.class); + } + public FieldsContext fields(int i) { + return getRuleContext(FieldsContext.class,i); } @SuppressWarnings("this-escape") public StatsCommandContext(ParserRuleContext parent, int invokingState) { @@ -2116,7 +2121,7 @@ public final StatsCommandContext statsCommand() throws RecognitionException { case 1: { setState(287); - fields(); + ((StatsCommandContext)_localctx).stats = fields(); } break; } @@ -2128,7 +2133,7 @@ public final StatsCommandContext statsCommand() throws RecognitionException { setState(290); match(BY); setState(291); - grouping(); + ((StatsCommandContext)_localctx).grouping = fields(); } break; } @@ -2147,14 +2152,16 @@ 
public final StatsCommandContext statsCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class InlinestatsCommandContext extends ParserRuleContext { + public FieldsContext stats; + public FieldsContext grouping; public TerminalNode INLINESTATS() { return getToken(EsqlBaseParser.INLINESTATS, 0); } - public FieldsContext fields() { - return getRuleContext(FieldsContext.class,0); + public List fields() { + return getRuleContexts(FieldsContext.class); } - public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } - public GroupingContext grouping() { - return getRuleContext(GroupingContext.class,0); + public FieldsContext fields(int i) { + return getRuleContext(FieldsContext.class,i); } + public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } @SuppressWarnings("this-escape") public InlinestatsCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2184,7 +2191,7 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx setState(294); match(INLINESTATS); setState(295); - fields(); + ((InlinestatsCommandContext)_localctx).stats = fields(); setState(298); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { @@ -2193,7 +2200,7 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx setState(296); match(BY); setState(297); - grouping(); + ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; } @@ -2210,78 +2217,6 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx return _localctx; } - @SuppressWarnings("CheckReturnValue") - public static class GroupingContext extends ParserRuleContext { - public List qualifiedName() { - return getRuleContexts(QualifiedNameContext.class); - } - public QualifiedNameContext qualifiedName(int i) { - return getRuleContext(QualifiedNameContext.class,i); - } - public List COMMA() { return 
getTokens(EsqlBaseParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(EsqlBaseParser.COMMA, i); - } - @SuppressWarnings("this-escape") - public GroupingContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_grouping; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterGrouping(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitGrouping(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitGrouping(this); - else return visitor.visitChildren(this); - } - } - - public final GroupingContext grouping() throws RecognitionException { - GroupingContext _localctx = new GroupingContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_grouping); - try { - int _alt; - enterOuterAlt(_localctx, 1); - { - setState(300); - qualifiedName(); - setState(305); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(301); - match(COMMA); - setState(302); - qualifiedName(); - } - } - } - setState(307); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - @SuppressWarnings("CheckReturnValue") public static class FromIdentifierContext extends ParserRuleContext { public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } @@ 
-2308,12 +2243,12 @@ public T accept(ParseTreeVisitor visitor) { public final FromIdentifierContext fromIdentifier() throws RecognitionException { FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_fromIdentifier); + enterRule(_localctx, 38, RULE_fromIdentifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(308); + setState(300); _la = _input.LA(1); if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2370,30 +2305,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_qualifiedName); + enterRule(_localctx, 40, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(310); + setState(302); identifier(); - setState(315); + setState(307); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(311); + setState(303); match(DOT); - setState(312); + setState(304); identifier(); } } } - setState(317); + setState(309); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } } } @@ -2442,30 +2377,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_qualifiedNamePattern); + enterRule(_localctx, 42, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(318); + setState(310); identifierPattern(); - setState(323); + setState(315); 
_errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,29,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(319); + setState(311); match(DOT); - setState(320); + setState(312); identifierPattern(); } } } - setState(325); + setState(317); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,29,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } } } @@ -2506,12 +2441,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_identifier); + enterRule(_localctx, 44, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(326); + setState(318); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2536,7 +2471,7 @@ public final IdentifierContext identifier() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class IdentifierPatternContext extends ParserRuleContext { - public TerminalNode PROJECT_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.PROJECT_UNQUOTED_IDENTIFIER, 0); } + public TerminalNode UNQUOTED_ID_PATTERN() { return getToken(EsqlBaseParser.UNQUOTED_ID_PATTERN, 0); } public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") public IdentifierPatternContext(ParserRuleContext parent, int invokingState) { @@ -2560,14 +2495,14 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_identifierPattern); + enterRule(_localctx, 46, 
RULE_identifierPattern); int _la; try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(320); _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==PROJECT_UNQUOTED_IDENTIFIER) ) { + if ( !(_la==QUOTED_IDENTIFIER || _la==UNQUOTED_ID_PATTERN) ) { _errHandler.recoverInline(this); } else { @@ -2839,17 +2774,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_constant); + enterRule(_localctx, 48, RULE_constant); int _la; try { - setState(372); + setState(364); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(330); + setState(322); match(NULL); } break; @@ -2857,9 +2792,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(331); + setState(323); integerValue(); - setState(332); + setState(324); match(UNQUOTED_IDENTIFIER); } break; @@ -2867,7 +2802,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(334); + setState(326); decimalValue(); } break; @@ -2875,7 +2810,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(335); + setState(327); integerValue(); } break; @@ -2883,7 +2818,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(336); + setState(328); booleanValue(); } break; @@ -2891,7 +2826,7 @@ public final ConstantContext constant() 
throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(337); + setState(329); match(PARAM); } break; @@ -2899,7 +2834,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(338); + setState(330); string(); } break; @@ -2907,27 +2842,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(339); + setState(331); match(OPENING_BRACKET); - setState(340); + setState(332); numericValue(); - setState(345); + setState(337); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(341); + setState(333); match(COMMA); - setState(342); + setState(334); numericValue(); } } - setState(347); + setState(339); _errHandler.sync(this); _la = _input.LA(1); } - setState(348); + setState(340); match(CLOSING_BRACKET); } break; @@ -2935,27 +2870,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(350); + setState(342); match(OPENING_BRACKET); - setState(351); + setState(343); booleanValue(); - setState(356); + setState(348); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(352); + setState(344); match(COMMA); - setState(353); + setState(345); booleanValue(); } } - setState(358); + setState(350); _errHandler.sync(this); _la = _input.LA(1); } - setState(359); + setState(351); match(CLOSING_BRACKET); } break; @@ -2963,27 +2898,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(361); + setState(353); match(OPENING_BRACKET); - setState(362); + setState(354); string(); - setState(367); + setState(359); 
_errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(363); + setState(355); match(COMMA); - setState(364); + setState(356); string(); } } - setState(369); + setState(361); _errHandler.sync(this); _la = _input.LA(1); } - setState(370); + setState(362); match(CLOSING_BRACKET); } break; @@ -3026,13 +2961,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_limitCommand); + enterRule(_localctx, 50, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(374); + setState(366); match(LIMIT); - setState(375); + setState(367); match(INTEGER_LITERAL); } } @@ -3082,32 +3017,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_sortCommand); + enterRule(_localctx, 52, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(377); + setState(369); match(SORT); - setState(378); + setState(370); orderExpression(); - setState(383); + setState(375); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(379); + setState(371); match(COMMA); - setState(380); + setState(372); orderExpression(); } } } - setState(385); + setState(377); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } } } @@ -3156,19 +3091,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new 
OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_orderExpression); + enterRule(_localctx, 54, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(386); + setState(378); booleanExpression(0); - setState(388); + setState(380); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(387); + setState(379); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3182,14 +3117,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(392); + setState(384); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(390); + setState(382); match(NULLS); - setState(391); + setState(383); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3252,63 +3187,63 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_keepCommand); + enterRule(_localctx, 56, RULE_keepCommand); try { int _alt; - setState(412); + setState(404); _errHandler.sync(this); switch (_input.LA(1)) { case KEEP: enterOuterAlt(_localctx, 1); { - setState(394); + setState(386); match(KEEP); - setState(395); + setState(387); qualifiedNamePattern(); - setState(400); + setState(392); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(396); + setState(388); match(COMMA); - setState(397); + 
setState(389); qualifiedNamePattern(); } } } - setState(402); + setState(394); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } } break; case PROJECT: enterOuterAlt(_localctx, 2); { - setState(403); + setState(395); match(PROJECT); - setState(404); + setState(396); qualifiedNamePattern(); - setState(409); + setState(401); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,38,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(405); + setState(397); match(COMMA); - setState(406); + setState(398); qualifiedNamePattern(); } } } - setState(411); + setState(403); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,38,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } } break; @@ -3362,32 +3297,32 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_dropCommand); + enterRule(_localctx, 58, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(414); + setState(406); match(DROP); - setState(415); + setState(407); qualifiedNamePattern(); - setState(420); + setState(412); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,39,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(416); + setState(408); match(COMMA); - setState(417); + setState(409); qualifiedNamePattern(); } } } - setState(422); + setState(414); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,39,_ctx); } } } @@ -3437,32 
+3372,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_renameCommand); + enterRule(_localctx, 60, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(423); + setState(415); match(RENAME); - setState(424); + setState(416); renameClause(); - setState(429); + setState(421); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(425); + setState(417); match(COMMA); - setState(426); + setState(418); renameClause(); } } } - setState(431); + setState(423); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3510,15 +3445,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_renameClause); + enterRule(_localctx, 62, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(432); + setState(424); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(433); + setState(425); match(AS); - setState(434); + setState(426); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3567,22 +3502,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_dissectCommand); + enterRule(_localctx, 64, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(436); 
+ setState(428); match(DISSECT); - setState(437); + setState(429); primaryExpression(); - setState(438); + setState(430); string(); - setState(440); + setState(432); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(439); + setState(431); commandOptions(); } break; @@ -3631,15 +3566,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_grokCommand); + enterRule(_localctx, 66, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(442); + setState(434); match(GROK); - setState(443); + setState(435); primaryExpression(); - setState(444); + setState(436); string(); } } @@ -3682,13 +3617,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_mvExpandCommand); + enterRule(_localctx, 68, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(446); + setState(438); match(MV_EXPAND); - setState(447); + setState(439); qualifiedName(); } } @@ -3737,30 +3672,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_commandOptions); + enterRule(_localctx, 70, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(449); + setState(441); commandOption(); - setState(454); + setState(446); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = getInterpreter().adaptivePredict(_input,42,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(450); + setState(442); match(COMMA); - setState(451); + setState(443); commandOption(); } } } - setState(456); + setState(448); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = getInterpreter().adaptivePredict(_input,42,_ctx); } } } @@ -3806,15 +3741,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_commandOption); + enterRule(_localctx, 72, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(457); + setState(449); identifier(); - setState(458); + setState(450); match(ASSIGN); - setState(459); + setState(451); constant(); } } @@ -3855,12 +3790,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_booleanValue); + enterRule(_localctx, 74, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(461); + setState(453); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3913,22 +3848,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_numericValue); + enterRule(_localctx, 76, RULE_numericValue); try { - setState(465); + setState(457); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(463); + setState(455); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { 
- setState(464); + setState(456); integerValue(); } break; @@ -3972,17 +3907,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_decimalValue); + enterRule(_localctx, 78, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(468); + setState(460); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(467); + setState(459); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3995,7 +3930,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(470); + setState(462); match(DECIMAL_LITERAL); } } @@ -4037,17 +3972,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_integerValue); + enterRule(_localctx, 80, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(465); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(472); + setState(464); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4060,7 +3995,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(475); + setState(467); match(INTEGER_LITERAL); } } @@ -4100,11 +4035,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_string); + enterRule(_localctx, 82, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(477); + setState(469); match(STRING); } } @@ -4122,6 +4057,7 @@ public final StringContext 
string() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class ComparisonOperatorContext extends ParserRuleContext { public TerminalNode EQ() { return getToken(EsqlBaseParser.EQ, 0); } + public TerminalNode CIEQ() { return getToken(EsqlBaseParser.CIEQ, 0); } public TerminalNode NEQ() { return getToken(EsqlBaseParser.NEQ, 0); } public TerminalNode LT() { return getToken(EsqlBaseParser.LT, 0); } public TerminalNode LTE() { return getToken(EsqlBaseParser.LTE, 0); } @@ -4149,14 +4085,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_comparisonOperator); + enterRule(_localctx, 84, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(479); + setState(471); _la = _input.LA(1); - if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 283726776524341248L) != 0) ) { + if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 571957152676052992L) != 0) ) { _errHandler.recoverInline(this); } else { @@ -4205,13 +4141,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_explainCommand); + enterRule(_localctx, 86, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(481); + setState(473); match(EXPLAIN); - setState(482); + setState(474); subqueryExpression(); } } @@ -4255,15 +4191,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_subqueryExpression); + enterRule(_localctx, 88, RULE_subqueryExpression); try { 
enterOuterAlt(_localctx, 1); { - setState(484); + setState(476); match(OPENING_BRACKET); - setState(485); + setState(477); query(0); - setState(486); + setState(478); match(CLOSING_BRACKET); } } @@ -4335,18 +4271,18 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_showCommand); + enterRule(_localctx, 90, RULE_showCommand); try { - setState(492); + setState(484); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(488); + setState(480); match(SHOW); - setState(489); + setState(481); match(INFO); } break; @@ -4354,9 +4290,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(490); + setState(482); match(SHOW); - setState(491); + setState(483); match(FUNCTIONS); } break; @@ -4375,11 +4311,15 @@ public final ShowCommandContext showCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class EnrichCommandContext extends ParserRuleContext { - public FromIdentifierContext policyName; + public Token policyName; public QualifiedNamePatternContext matchField; public TerminalNode ENRICH() { return getToken(EsqlBaseParser.ENRICH, 0); } - public FromIdentifierContext fromIdentifier() { - return getRuleContext(FromIdentifierContext.class,0); + public TerminalNode ENRICH_POLICY_NAME() { return getToken(EsqlBaseParser.ENRICH_POLICY_NAME, 0); } + public List setting() { + return getRuleContexts(SettingContext.class); + } + public SettingContext setting(int i) { + return getRuleContext(SettingContext.class,i); } public TerminalNode ON() { return 
getToken(EsqlBaseParser.ON, 0); } public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); } @@ -4418,51 +4358,66 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_enrichCommand); + enterRule(_localctx, 92, RULE_enrichCommand); + int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(494); + setState(486); match(ENRICH); - setState(495); - ((EnrichCommandContext)_localctx).policyName = fromIdentifier(); - setState(498); + setState(490); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==OPENING_BRACKET) { + { + { + setState(487); + setting(); + } + } + setState(492); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(493); + ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); + setState(496); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(496); + setState(494); match(ON); - setState(497); + setState(495); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(509); + setState(507); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(500); + setState(498); match(WITH); - setState(501); + setState(499); enrichWithClause(); - setState(506); + setState(504); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(502); + setState(500); match(COMMA); - setState(503); + setState(501); enrichWithClause(); } } } - setState(508); + setState(506); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4515,23 +4470,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext 
enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_enrichWithClause); + enterRule(_localctx, 94, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(514); + setState(512); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(511); + setState(509); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(512); + setState(510); match(ASSIGN); } break; } - setState(516); + setState(514); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4546,6 +4501,66 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class SettingContext extends ParserRuleContext { + public Token name; + public Token value; + public TerminalNode OPENING_BRACKET() { return getToken(EsqlBaseParser.OPENING_BRACKET, 0); } + public TerminalNode COLON() { return getToken(EsqlBaseParser.COLON, 0); } + public TerminalNode CLOSING_BRACKET() { return getToken(EsqlBaseParser.CLOSING_BRACKET, 0); } + public List SETTING() { return getTokens(EsqlBaseParser.SETTING); } + public TerminalNode SETTING(int i) { + return getToken(EsqlBaseParser.SETTING, i); + } + @SuppressWarnings("this-escape") + public SettingContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_setting; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterSetting(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitSetting(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( 
visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitSetting(this); + else return visitor.visitChildren(this); + } + } + + public final SettingContext setting() throws RecognitionException { + SettingContext _localctx = new SettingContext(_ctx, getState()); + enterRule(_localctx, 96, RULE_setting); + try { + enterOuterAlt(_localctx, 1); + { + setState(516); + match(OPENING_BRACKET); + setState(517); + ((SettingContext)_localctx).name = match(SETTING); + setState(518); + match(COLON); + setState(519); + ((SettingContext)_localctx).value = match(SETTING); + setState(520); + match(CLOSING_BRACKET); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: @@ -4584,7 +4599,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\u0004\u0001b\u0207\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001i\u020b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4628,84 +4643,84 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0003\u0011"+ "\u0121\b\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u0125\b\u0011\u0001"+ "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u012b\b\u0012\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0130\b\u0013\n\u0013\f\u0013"+ - "\u0133\t\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015"+ + 
"\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014\u0132"+ + "\b\u0014\n\u0014\f\u0014\u0135\t\u0014\u0001\u0015\u0001\u0015\u0001\u0015"+ "\u0005\u0015\u013a\b\u0015\n\u0015\f\u0015\u013d\t\u0015\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0005\u0016\u0142\b\u0016\n\u0016\f\u0016\u0145\t\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019"+ - "\u0158\b\u0019\n\u0019\f\u0019\u015b\t\u0019\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0163\b\u0019\n"+ - "\u0019\f\u0019\u0166\t\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u016e\b\u0019\n\u0019\f\u0019"+ - "\u0171\t\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u0175\b\u0019\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ - "\u001b\u0005\u001b\u017e\b\u001b\n\u001b\f\u001b\u0181\t\u001b\u0001\u001c"+ - "\u0001\u001c\u0003\u001c\u0185\b\u001c\u0001\u001c\u0001\u001c\u0003\u001c"+ - "\u0189\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d"+ - "\u018f\b\u001d\n\u001d\f\u001d\u0192\t\u001d\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0005\u001d\u0198\b\u001d\n\u001d\f\u001d\u019b\t\u001d"+ - "\u0003\u001d\u019d\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ - "\u0005\u001e\u01a3\b\u001e\n\u001e\f\u001e\u01a6\t\u001e\u0001\u001f\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01ac\b\u001f\n\u001f\f\u001f"+ - "\u01af\t\u001f\u0001 \u0001 \u0001 \u0001 \u0001!\u0001!\u0001!\u0001"+ - "!\u0003!\u01b9\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001"+ - "#\u0001$\u0001$\u0001$\u0005$\u01c5\b$\n$\f$\u01c8\t$\u0001%\u0001%\u0001"+ - "%\u0001%\u0001&\u0001&\u0001\'\u0001\'\u0003\'\u01d2\b\'\u0001(\u0003"+ - 
"(\u01d5\b(\u0001(\u0001(\u0001)\u0003)\u01da\b)\u0001)\u0001)\u0001*\u0001"+ - "*\u0001+\u0001+\u0001,\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001.\u0001.\u0003.\u01ed\b.\u0001/\u0001/\u0001/\u0001/\u0003"+ - "/\u01f3\b/\u0001/\u0001/\u0001/\u0001/\u0005/\u01f9\b/\n/\f/\u01fc\t/"+ - "\u0003/\u01fe\b/\u00010\u00010\u00010\u00030\u0203\b0\u00010\u00010\u0001"+ - "0\u0000\u0003\u0002\n\u00101\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`\u0000\t\u0001\u0000:;\u0001\u0000<>\u0002\u0000BBGG\u0001\u0000"+ - "AB\u0002\u0000BBKK\u0002\u0000 ##\u0001\u0000&\'\u0002\u0000%%33\u0001"+ - "\u000049\u0224\u0000b\u0001\u0000\u0000\u0000\u0002e\u0001\u0000\u0000"+ - "\u0000\u0004t\u0001\u0000\u0000\u0000\u0006\u0083\u0001\u0000\u0000\u0000"+ - "\b\u0085\u0001\u0000\u0000\u0000\n\u00a4\u0001\u0000\u0000\u0000\f\u00bf"+ - "\u0001\u0000\u0000\u0000\u000e\u00c6\u0001\u0000\u0000\u0000\u0010\u00cc"+ - "\u0001\u0000\u0000\u0000\u0012\u00e0\u0001\u0000\u0000\u0000\u0014\u00e2"+ - "\u0001\u0000\u0000\u0000\u0016\u00f1\u0001\u0000\u0000\u0000\u0018\u00f4"+ - "\u0001\u0000\u0000\u0000\u001a\u0101\u0001\u0000\u0000\u0000\u001c\u0103"+ - "\u0001\u0000\u0000\u0000\u001e\u010f\u0001\u0000\u0000\u0000 \u011b\u0001"+ - "\u0000\u0000\u0000\"\u011e\u0001\u0000\u0000\u0000$\u0126\u0001\u0000"+ - "\u0000\u0000&\u012c\u0001\u0000\u0000\u0000(\u0134\u0001\u0000\u0000\u0000"+ - "*\u0136\u0001\u0000\u0000\u0000,\u013e\u0001\u0000\u0000\u0000.\u0146"+ - "\u0001\u0000\u0000\u00000\u0148\u0001\u0000\u0000\u00002\u0174\u0001\u0000"+ - "\u0000\u00004\u0176\u0001\u0000\u0000\u00006\u0179\u0001\u0000\u0000\u0000"+ - "8\u0182\u0001\u0000\u0000\u0000:\u019c\u0001\u0000\u0000\u0000<\u019e"+ - "\u0001\u0000\u0000\u0000>\u01a7\u0001\u0000\u0000\u0000@\u01b0\u0001\u0000"+ - "\u0000\u0000B\u01b4\u0001\u0000\u0000\u0000D\u01ba\u0001\u0000\u0000\u0000"+ - 
"F\u01be\u0001\u0000\u0000\u0000H\u01c1\u0001\u0000\u0000\u0000J\u01c9"+ - "\u0001\u0000\u0000\u0000L\u01cd\u0001\u0000\u0000\u0000N\u01d1\u0001\u0000"+ - "\u0000\u0000P\u01d4\u0001\u0000\u0000\u0000R\u01d9\u0001\u0000\u0000\u0000"+ - "T\u01dd\u0001\u0000\u0000\u0000V\u01df\u0001\u0000\u0000\u0000X\u01e1"+ - "\u0001\u0000\u0000\u0000Z\u01e4\u0001\u0000\u0000\u0000\\\u01ec\u0001"+ - "\u0000\u0000\u0000^\u01ee\u0001\u0000\u0000\u0000`\u0202\u0001\u0000\u0000"+ - "\u0000bc\u0003\u0002\u0001\u0000cd\u0005\u0000\u0000\u0001d\u0001\u0001"+ - "\u0000\u0000\u0000ef\u0006\u0001\uffff\uffff\u0000fg\u0003\u0004\u0002"+ - "\u0000gm\u0001\u0000\u0000\u0000hi\n\u0001\u0000\u0000ij\u0005\u001a\u0000"+ - "\u0000jl\u0003\u0006\u0003\u0000kh\u0001\u0000\u0000\u0000lo\u0001\u0000"+ - "\u0000\u0000mk\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000n\u0003"+ - "\u0001\u0000\u0000\u0000om\u0001\u0000\u0000\u0000pu\u0003X,\u0000qu\u0003"+ - "\u001c\u000e\u0000ru\u0003\u0016\u000b\u0000su\u0003\\.\u0000tp\u0001"+ - "\u0000\u0000\u0000tq\u0001\u0000\u0000\u0000tr\u0001\u0000\u0000\u0000"+ - "ts\u0001\u0000\u0000\u0000u\u0005\u0001\u0000\u0000\u0000v\u0084\u0003"+ - " \u0010\u0000w\u0084\u0003$\u0012\u0000x\u0084\u00034\u001a\u0000y\u0084"+ - "\u0003:\u001d\u0000z\u0084\u00036\u001b\u0000{\u0084\u0003\"\u0011\u0000"+ - "|\u0084\u0003\b\u0004\u0000}\u0084\u0003<\u001e\u0000~\u0084\u0003>\u001f"+ - "\u0000\u007f\u0084\u0003B!\u0000\u0080\u0084\u0003D\"\u0000\u0081\u0084"+ - "\u0003^/\u0000\u0082\u0084\u0003F#\u0000\u0083v\u0001\u0000\u0000\u0000"+ - "\u0083w\u0001\u0000\u0000\u0000\u0083x\u0001\u0000\u0000\u0000\u0083y"+ - "\u0001\u0000\u0000\u0000\u0083z\u0001\u0000\u0000\u0000\u0083{\u0001\u0000"+ - "\u0000\u0000\u0083|\u0001\u0000\u0000\u0000\u0083}\u0001\u0000\u0000\u0000"+ - "\u0083~\u0001\u0000\u0000\u0000\u0083\u007f\u0001\u0000\u0000\u0000\u0083"+ - "\u0080\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000\u0000\u0083"+ - 
"\u0082\u0001\u0000\u0000\u0000\u0084\u0007\u0001\u0000\u0000\u0000\u0085"+ - "\u0086\u0005\u0012\u0000\u0000\u0086\u0087\u0003\n\u0005\u0000\u0087\t"+ - "\u0001\u0000\u0000\u0000\u0088\u0089\u0006\u0005\uffff\uffff\u0000\u0089"+ + "\u0016\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ + "\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ + "\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0150\b\u0018\n"+ + "\u0018\f\u0018\u0153\t\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ + "\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u015b\b\u0018\n\u0018\f\u0018"+ + "\u015e\t\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0005\u0018\u0166\b\u0018\n\u0018\f\u0018\u0169\t\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u016d\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0176"+ + "\b\u001a\n\u001a\f\u001a\u0179\t\u001a\u0001\u001b\u0001\u001b\u0003\u001b"+ + "\u017d\b\u001b\u0001\u001b\u0001\u001b\u0003\u001b\u0181\b\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0187\b\u001c\n"+ + "\u001c\f\u001c\u018a\t\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001"+ + "\u001c\u0005\u001c\u0190\b\u001c\n\u001c\f\u001c\u0193\t\u001c\u0003\u001c"+ + "\u0195\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d"+ + "\u019b\b\u001d\n\u001d\f\u001d\u019e\t\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u01a4\b\u001e\n\u001e\f\u001e\u01a7\t\u001e"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 "+ + "\u0001 \u0003 \u01b1\b \u0001!\u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001"+ + "\"\u0001#\u0001#\u0001#\u0005#\u01bd\b#\n#\f#\u01c0\t#\u0001$\u0001$\u0001"+ + "$\u0001$\u0001%\u0001%\u0001&\u0001&\u0003&\u01ca\b&\u0001\'\u0003\'\u01cd"+ + "\b\'\u0001\'\u0001\'\u0001(\u0003(\u01d2\b(\u0001(\u0001(\u0001)\u0001"+ + 
")\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ + "-\u0001-\u0001-\u0001-\u0003-\u01e5\b-\u0001.\u0001.\u0005.\u01e9\b.\n"+ + ".\f.\u01ec\t.\u0001.\u0001.\u0001.\u0003.\u01f1\b.\u0001.\u0001.\u0001"+ + ".\u0001.\u0005.\u01f7\b.\n.\f.\u01fa\t.\u0003.\u01fc\b.\u0001/\u0001/"+ + "\u0001/\u0003/\u0201\b/\u0001/\u0001/\u00010\u00010\u00010\u00010\u0001"+ + "0\u00010\u00010\u0000\u0003\u0002\n\u00101\u0000\u0002\u0004\u0006\b\n"+ + "\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.0246"+ + "8:<>@BDFHJLNPRTVXZ\\^`\u0000\t\u0001\u0000;<\u0001\u0000=?\u0002\u0000"+ + "CCHH\u0001\u0000BC\u0002\u0000CCLL\u0002\u0000 ##\u0001\u0000&\'\u0002"+ + "\u0000%%33\u0001\u00004:\u0228\u0000b\u0001\u0000\u0000\u0000\u0002e\u0001"+ + "\u0000\u0000\u0000\u0004t\u0001\u0000\u0000\u0000\u0006\u0083\u0001\u0000"+ + "\u0000\u0000\b\u0085\u0001\u0000\u0000\u0000\n\u00a4\u0001\u0000\u0000"+ + "\u0000\f\u00bf\u0001\u0000\u0000\u0000\u000e\u00c6\u0001\u0000\u0000\u0000"+ + "\u0010\u00cc\u0001\u0000\u0000\u0000\u0012\u00e0\u0001\u0000\u0000\u0000"+ + "\u0014\u00e2\u0001\u0000\u0000\u0000\u0016\u00f1\u0001\u0000\u0000\u0000"+ + "\u0018\u00f4\u0001\u0000\u0000\u0000\u001a\u0101\u0001\u0000\u0000\u0000"+ + "\u001c\u0103\u0001\u0000\u0000\u0000\u001e\u010f\u0001\u0000\u0000\u0000"+ + " \u011b\u0001\u0000\u0000\u0000\"\u011e\u0001\u0000\u0000\u0000$\u0126"+ + "\u0001\u0000\u0000\u0000&\u012c\u0001\u0000\u0000\u0000(\u012e\u0001\u0000"+ + "\u0000\u0000*\u0136\u0001\u0000\u0000\u0000,\u013e\u0001\u0000\u0000\u0000"+ + ".\u0140\u0001\u0000\u0000\u00000\u016c\u0001\u0000\u0000\u00002\u016e"+ + "\u0001\u0000\u0000\u00004\u0171\u0001\u0000\u0000\u00006\u017a\u0001\u0000"+ + "\u0000\u00008\u0194\u0001\u0000\u0000\u0000:\u0196\u0001\u0000\u0000\u0000"+ + "<\u019f\u0001\u0000\u0000\u0000>\u01a8\u0001\u0000\u0000\u0000@\u01ac"+ + "\u0001\u0000\u0000\u0000B\u01b2\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000"+ + 
"\u0000\u0000F\u01b9\u0001\u0000\u0000\u0000H\u01c1\u0001\u0000\u0000\u0000"+ + "J\u01c5\u0001\u0000\u0000\u0000L\u01c9\u0001\u0000\u0000\u0000N\u01cc"+ + "\u0001\u0000\u0000\u0000P\u01d1\u0001\u0000\u0000\u0000R\u01d5\u0001\u0000"+ + "\u0000\u0000T\u01d7\u0001\u0000\u0000\u0000V\u01d9\u0001\u0000\u0000\u0000"+ + "X\u01dc\u0001\u0000\u0000\u0000Z\u01e4\u0001\u0000\u0000\u0000\\\u01e6"+ + "\u0001\u0000\u0000\u0000^\u0200\u0001\u0000\u0000\u0000`\u0204\u0001\u0000"+ + "\u0000\u0000bc\u0003\u0002\u0001\u0000cd\u0005\u0000\u0000\u0001d\u0001"+ + "\u0001\u0000\u0000\u0000ef\u0006\u0001\uffff\uffff\u0000fg\u0003\u0004"+ + "\u0002\u0000gm\u0001\u0000\u0000\u0000hi\n\u0001\u0000\u0000ij\u0005\u001a"+ + "\u0000\u0000jl\u0003\u0006\u0003\u0000kh\u0001\u0000\u0000\u0000lo\u0001"+ + "\u0000\u0000\u0000mk\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000"+ + "n\u0003\u0001\u0000\u0000\u0000om\u0001\u0000\u0000\u0000pu\u0003V+\u0000"+ + "qu\u0003\u001c\u000e\u0000ru\u0003\u0016\u000b\u0000su\u0003Z-\u0000t"+ + "p\u0001\u0000\u0000\u0000tq\u0001\u0000\u0000\u0000tr\u0001\u0000\u0000"+ + "\u0000ts\u0001\u0000\u0000\u0000u\u0005\u0001\u0000\u0000\u0000v\u0084"+ + "\u0003 \u0010\u0000w\u0084\u0003$\u0012\u0000x\u0084\u00032\u0019\u0000"+ + "y\u0084\u00038\u001c\u0000z\u0084\u00034\u001a\u0000{\u0084\u0003\"\u0011"+ + "\u0000|\u0084\u0003\b\u0004\u0000}\u0084\u0003:\u001d\u0000~\u0084\u0003"+ + "<\u001e\u0000\u007f\u0084\u0003@ \u0000\u0080\u0084\u0003B!\u0000\u0081"+ + "\u0084\u0003\\.\u0000\u0082\u0084\u0003D\"\u0000\u0083v\u0001\u0000\u0000"+ + "\u0000\u0083w\u0001\u0000\u0000\u0000\u0083x\u0001\u0000\u0000\u0000\u0083"+ + "y\u0001\u0000\u0000\u0000\u0083z\u0001\u0000\u0000\u0000\u0083{\u0001"+ + "\u0000\u0000\u0000\u0083|\u0001\u0000\u0000\u0000\u0083}\u0001\u0000\u0000"+ + "\u0000\u0083~\u0001\u0000\u0000\u0000\u0083\u007f\u0001\u0000\u0000\u0000"+ + "\u0083\u0080\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000\u0000"+ + 
"\u0083\u0082\u0001\u0000\u0000\u0000\u0084\u0007\u0001\u0000\u0000\u0000"+ + "\u0085\u0086\u0005\u0012\u0000\u0000\u0086\u0087\u0003\n\u0005\u0000\u0087"+ + "\t\u0001\u0000\u0000\u0000\u0088\u0089\u0006\u0005\uffff\uffff\u0000\u0089"+ "\u008a\u0005,\u0000\u0000\u008a\u00a5\u0003\n\u0005\u0007\u008b\u00a5"+ "\u0003\u000e\u0007\u0000\u008c\u00a5\u0003\f\u0006\u0000\u008d\u008f\u0003"+ "\u000e\u0007\u0000\u008e\u0090\u0005,\u0000\u0000\u008f\u008e\u0001\u0000"+ @@ -4732,14 +4747,14 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0001\u0000\u0000\u0000\u00b1\u00b3\u0003\u000e\u0007\u0000\u00b2\u00b4"+ "\u0005,\u0000\u0000\u00b3\u00b2\u0001\u0000\u0000\u0000\u00b3\u00b4\u0001"+ "\u0000\u0000\u0000\u00b4\u00b5\u0001\u0000\u0000\u0000\u00b5\u00b6\u0005"+ - "+\u0000\u0000\u00b6\u00b7\u0003T*\u0000\u00b7\u00c0\u0001\u0000\u0000"+ + "+\u0000\u0000\u00b6\u00b7\u0003R)\u0000\u00b7\u00c0\u0001\u0000\u0000"+ "\u0000\u00b8\u00ba\u0003\u000e\u0007\u0000\u00b9\u00bb\u0005,\u0000\u0000"+ "\u00ba\u00b9\u0001\u0000\u0000\u0000\u00ba\u00bb\u0001\u0000\u0000\u0000"+ "\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc\u00bd\u00051\u0000\u0000\u00bd"+ - "\u00be\u0003T*\u0000\u00be\u00c0\u0001\u0000\u0000\u0000\u00bf\u00b1\u0001"+ + "\u00be\u0003R)\u0000\u00be\u00c0\u0001\u0000\u0000\u0000\u00bf\u00b1\u0001"+ "\u0000\u0000\u0000\u00bf\u00b8\u0001\u0000\u0000\u0000\u00c0\r\u0001\u0000"+ "\u0000\u0000\u00c1\u00c7\u0003\u0010\b\u0000\u00c2\u00c3\u0003\u0010\b"+ - "\u0000\u00c3\u00c4\u0003V+\u0000\u00c4\u00c5\u0003\u0010\b\u0000\u00c5"+ + "\u0000\u00c3\u00c4\u0003T*\u0000\u00c4\u00c5\u0003\u0010\b\u0000\u00c5"+ "\u00c7\u0001\u0000\u0000\u0000\u00c6\u00c1\u0001\u0000\u0000\u0000\u00c6"+ "\u00c2\u0001\u0000\u0000\u0000\u00c7\u000f\u0001\u0000\u0000\u0000\u00c8"+ "\u00c9\u0006\b\uffff\uffff\u0000\u00c9\u00cd\u0003\u0012\t\u0000\u00ca"+ @@ -4751,14 +4766,14 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, 
"\b\u0002\u00d4\u00ce\u0001\u0000\u0000\u0000\u00d4\u00d1\u0001\u0000\u0000"+ "\u0000\u00d5\u00d8\u0001\u0000\u0000\u0000\u00d6\u00d4\u0001\u0000\u0000"+ "\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u0011\u0001\u0000\u0000"+ - "\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d9\u00e1\u00032\u0019\u0000"+ - "\u00da\u00e1\u0003*\u0015\u0000\u00db\u00e1\u0003\u0014\n\u0000\u00dc"+ + "\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d9\u00e1\u00030\u0018\u0000"+ + "\u00da\u00e1\u0003(\u0014\u0000\u00db\u00e1\u0003\u0014\n\u0000\u00dc"+ "\u00dd\u0005(\u0000\u0000\u00dd\u00de\u0003\n\u0005\u0000\u00de\u00df"+ "\u00052\u0000\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001"+ "\u0000\u0000\u0000\u00e0\u00da\u0001\u0000\u0000\u0000\u00e0\u00db\u0001"+ "\u0000\u0000\u0000\u00e0\u00dc\u0001\u0000\u0000\u0000\u00e1\u0013\u0001"+ - "\u0000\u0000\u0000\u00e2\u00e3\u0003.\u0017\u0000\u00e3\u00ed\u0005(\u0000"+ - "\u0000\u00e4\u00ee\u0005<\u0000\u0000\u00e5\u00ea\u0003\n\u0005\u0000"+ + "\u0000\u0000\u0000\u00e2\u00e3\u0003,\u0016\u0000\u00e3\u00ed\u0005(\u0000"+ + "\u0000\u00e4\u00ee\u0005=\u0000\u0000\u00e5\u00ea\u0003\n\u0005\u0000"+ "\u00e6\u00e7\u0005\"\u0000\u0000\u00e7\u00e9\u0003\n\u0005\u0000\u00e8"+ "\u00e6\u0001\u0000\u0000\u0000\u00e9\u00ec\u0001\u0000\u0000\u0000\u00ea"+ "\u00e8\u0001\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb"+ @@ -4772,152 +4787,154 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0000\u00f8\u00fb\u0001\u0000\u0000\u0000\u00f9\u00f7\u0001\u0000\u0000"+ "\u0000\u00f9\u00fa\u0001\u0000\u0000\u0000\u00fa\u0019\u0001\u0000\u0000"+ "\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0102\u0003\n\u0005\u0000"+ - "\u00fd\u00fe\u0003*\u0015\u0000\u00fe\u00ff\u0005!\u0000\u0000\u00ff\u0100"+ + "\u00fd\u00fe\u0003(\u0014\u0000\u00fe\u00ff\u0005!\u0000\u0000\u00ff\u0100"+ "\u0003\n\u0005\u0000\u0100\u0102\u0001\u0000\u0000\u0000\u0101\u00fc\u0001"+ 
"\u0000\u0000\u0000\u0101\u00fd\u0001\u0000\u0000\u0000\u0102\u001b\u0001"+ "\u0000\u0000\u0000\u0103\u0104\u0005\u0006\u0000\u0000\u0104\u0109\u0003"+ - "(\u0014\u0000\u0105\u0106\u0005\"\u0000\u0000\u0106\u0108\u0003(\u0014"+ + "&\u0013\u0000\u0105\u0106\u0005\"\u0000\u0000\u0106\u0108\u0003&\u0013"+ "\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u010b\u0001\u0000\u0000"+ "\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u0109\u010a\u0001\u0000\u0000"+ "\u0000\u010a\u010d\u0001\u0000\u0000\u0000\u010b\u0109\u0001\u0000\u0000"+ "\u0000\u010c\u010e\u0003\u001e\u000f\u0000\u010d\u010c\u0001\u0000\u0000"+ "\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u001d\u0001\u0000\u0000"+ - "\u0000\u010f\u0110\u0005?\u0000\u0000\u0110\u0111\u0005F\u0000\u0000\u0111"+ - "\u0116\u0003(\u0014\u0000\u0112\u0113\u0005\"\u0000\u0000\u0113\u0115"+ - "\u0003(\u0014\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ + "\u0000\u010f\u0110\u0005@\u0000\u0000\u0110\u0111\u0005G\u0000\u0000\u0111"+ + "\u0116\u0003&\u0013\u0000\u0112\u0113\u0005\"\u0000\u0000\u0113\u0115"+ + "\u0003&\u0013\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ "\u0000\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ - "\u0000\u0000\u0000\u0119\u011a\u0005@\u0000\u0000\u011a\u001f\u0001\u0000"+ + "\u0000\u0000\u0000\u0119\u011a\u0005A\u0000\u0000\u011a\u001f\u0001\u0000"+ "\u0000\u0000\u011b\u011c\u0005\u0004\u0000\u0000\u011c\u011d\u0003\u0018"+ "\f\u0000\u011d!\u0001\u0000\u0000\u0000\u011e\u0120\u0005\u0011\u0000"+ "\u0000\u011f\u0121\u0003\u0018\f\u0000\u0120\u011f\u0001\u0000\u0000\u0000"+ "\u0120\u0121\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000\u0000"+ - "\u0122\u0123\u0005\u001e\u0000\u0000\u0123\u0125\u0003&\u0013\u0000\u0124"+ + "\u0122\u0123\u0005\u001e\u0000\u0000\u0123\u0125\u0003\u0018\f\u0000\u0124"+ "\u0122\u0001\u0000\u0000\u0000\u0124\u0125\u0001\u0000\u0000\u0000\u0125"+ 
"#\u0001\u0000\u0000\u0000\u0126\u0127\u0005\b\u0000\u0000\u0127\u012a"+ "\u0003\u0018\f\u0000\u0128\u0129\u0005\u001e\u0000\u0000\u0129\u012b\u0003"+ - "&\u0013\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001\u0000"+ - "\u0000\u0000\u012b%\u0001\u0000\u0000\u0000\u012c\u0131\u0003*\u0015\u0000"+ - "\u012d\u012e\u0005\"\u0000\u0000\u012e\u0130\u0003*\u0015\u0000\u012f"+ - "\u012d\u0001\u0000\u0000\u0000\u0130\u0133\u0001\u0000\u0000\u0000\u0131"+ - "\u012f\u0001\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132"+ - "\'\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0134\u0135"+ - "\u0007\u0002\u0000\u0000\u0135)\u0001\u0000\u0000\u0000\u0136\u013b\u0003"+ + "\u0018\f\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001\u0000"+ + "\u0000\u0000\u012b%\u0001\u0000\u0000\u0000\u012c\u012d\u0007\u0002\u0000"+ + "\u0000\u012d\'\u0001\u0000\u0000\u0000\u012e\u0133\u0003,\u0016\u0000"+ + "\u012f\u0130\u0005$\u0000\u0000\u0130\u0132\u0003,\u0016\u0000\u0131\u012f"+ + "\u0001\u0000\u0000\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133\u0131"+ + "\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134)\u0001"+ + "\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0136\u013b\u0003"+ ".\u0017\u0000\u0137\u0138\u0005$\u0000\u0000\u0138\u013a\u0003.\u0017"+ "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000"+ "\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000"+ "\u0000\u013c+\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ - "\u013e\u0143\u00030\u0018\u0000\u013f\u0140\u0005$\u0000\u0000\u0140\u0142"+ - "\u00030\u0018\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0142\u0145\u0001"+ - "\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000\u0000\u0143\u0144\u0001"+ - "\u0000\u0000\u0000\u0144-\u0001\u0000\u0000\u0000\u0145\u0143\u0001\u0000"+ - "\u0000\u0000\u0146\u0147\u0007\u0003\u0000\u0000\u0147/\u0001\u0000\u0000"+ - 
"\u0000\u0148\u0149\u0007\u0004\u0000\u0000\u01491\u0001\u0000\u0000\u0000"+ - "\u014a\u0175\u0005-\u0000\u0000\u014b\u014c\u0003R)\u0000\u014c\u014d"+ - "\u0005A\u0000\u0000\u014d\u0175\u0001\u0000\u0000\u0000\u014e\u0175\u0003"+ - "P(\u0000\u014f\u0175\u0003R)\u0000\u0150\u0175\u0003L&\u0000\u0151\u0175"+ - "\u00050\u0000\u0000\u0152\u0175\u0003T*\u0000\u0153\u0154\u0005?\u0000"+ - "\u0000\u0154\u0159\u0003N\'\u0000\u0155\u0156\u0005\"\u0000\u0000\u0156"+ - "\u0158\u0003N\'\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158\u015b"+ - "\u0001\u0000\u0000\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a"+ - "\u0001\u0000\u0000\u0000\u015a\u015c\u0001\u0000\u0000\u0000\u015b\u0159"+ - "\u0001\u0000\u0000\u0000\u015c\u015d\u0005@\u0000\u0000\u015d\u0175\u0001"+ - "\u0000\u0000\u0000\u015e\u015f\u0005?\u0000\u0000\u015f\u0164\u0003L&"+ - "\u0000\u0160\u0161\u0005\"\u0000\u0000\u0161\u0163\u0003L&\u0000\u0162"+ - "\u0160\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164"+ - "\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165"+ - "\u0167\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0167"+ - "\u0168\u0005@\u0000\u0000\u0168\u0175\u0001\u0000\u0000\u0000\u0169\u016a"+ - "\u0005?\u0000\u0000\u016a\u016f\u0003T*\u0000\u016b\u016c\u0005\"\u0000"+ - "\u0000\u016c\u016e\u0003T*\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016e"+ - "\u0171\u0001\u0000\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u016f"+ - "\u0170\u0001\u0000\u0000\u0000\u0170\u0172\u0001\u0000\u0000\u0000\u0171"+ - "\u016f\u0001\u0000\u0000\u0000\u0172\u0173\u0005@\u0000\u0000\u0173\u0175"+ - "\u0001\u0000\u0000\u0000\u0174\u014a\u0001\u0000\u0000\u0000\u0174\u014b"+ - "\u0001\u0000\u0000\u0000\u0174\u014e\u0001\u0000\u0000\u0000\u0174\u014f"+ - "\u0001\u0000\u0000\u0000\u0174\u0150\u0001\u0000\u0000\u0000\u0174\u0151"+ - "\u0001\u0000\u0000\u0000\u0174\u0152\u0001\u0000\u0000\u0000\u0174\u0153"+ - 
"\u0001\u0000\u0000\u0000\u0174\u015e\u0001\u0000\u0000\u0000\u0174\u0169"+ - "\u0001\u0000\u0000\u0000\u01753\u0001\u0000\u0000\u0000\u0176\u0177\u0005"+ - "\n\u0000\u0000\u0177\u0178\u0005\u001c\u0000\u0000\u01785\u0001\u0000"+ - "\u0000\u0000\u0179\u017a\u0005\u0010\u0000\u0000\u017a\u017f\u00038\u001c"+ - "\u0000\u017b\u017c\u0005\"\u0000\u0000\u017c\u017e\u00038\u001c\u0000"+ - "\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0181\u0001\u0000\u0000\u0000"+ - "\u017f\u017d\u0001\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000"+ - "\u01807\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182"+ - "\u0184\u0003\n\u0005\u0000\u0183\u0185\u0007\u0005\u0000\u0000\u0184\u0183"+ - "\u0001\u0000\u0000\u0000\u0184\u0185\u0001\u0000\u0000\u0000\u0185\u0188"+ - "\u0001\u0000\u0000\u0000\u0186\u0187\u0005.\u0000\u0000\u0187\u0189\u0007"+ - "\u0006\u0000\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0188\u0189\u0001"+ - "\u0000\u0000\u0000\u01899\u0001\u0000\u0000\u0000\u018a\u018b\u0005\t"+ - "\u0000\u0000\u018b\u0190\u0003,\u0016\u0000\u018c\u018d\u0005\"\u0000"+ - "\u0000\u018d\u018f\u0003,\u0016\u0000\u018e\u018c\u0001\u0000\u0000\u0000"+ - "\u018f\u0192\u0001\u0000\u0000\u0000\u0190\u018e\u0001\u0000\u0000\u0000"+ - "\u0190\u0191\u0001\u0000\u0000\u0000\u0191\u019d\u0001\u0000\u0000\u0000"+ - "\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0194\u0005\f\u0000\u0000\u0194"+ - "\u0199\u0003,\u0016\u0000\u0195\u0196\u0005\"\u0000\u0000\u0196\u0198"+ - "\u0003,\u0016\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0001"+ - "\u0000\u0000\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u0199\u019a\u0001"+ - "\u0000\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001"+ - "\u0000\u0000\u0000\u019c\u018a\u0001\u0000\u0000\u0000\u019c\u0193\u0001"+ - "\u0000\u0000\u0000\u019d;\u0001\u0000\u0000\u0000\u019e\u019f\u0005\u0002"+ - "\u0000\u0000\u019f\u01a4\u0003,\u0016\u0000\u01a0\u01a1\u0005\"\u0000"+ - 
"\u0000\u01a1\u01a3\u0003,\u0016\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000"+ - "\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5=\u0001\u0000\u0000\u0000\u01a6"+ - "\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005\r\u0000\u0000\u01a8\u01ad"+ - "\u0003@ \u0000\u01a9\u01aa\u0005\"\u0000\u0000\u01aa\u01ac\u0003@ \u0000"+ - "\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac\u01af\u0001\u0000\u0000\u0000"+ - "\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad\u01ae\u0001\u0000\u0000\u0000"+ - "\u01ae?\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01b0"+ - "\u01b1\u0003,\u0016\u0000\u01b1\u01b2\u0005O\u0000\u0000\u01b2\u01b3\u0003"+ - ",\u0016\u0000\u01b3A\u0001\u0000\u0000\u0000\u01b4\u01b5\u0005\u0001\u0000"+ - "\u0000\u01b5\u01b6\u0003\u0012\t\u0000\u01b6\u01b8\u0003T*\u0000\u01b7"+ - "\u01b9\u0003H$\u0000\u01b8\u01b7\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001"+ - "\u0000\u0000\u0000\u01b9C\u0001\u0000\u0000\u0000\u01ba\u01bb\u0005\u0007"+ - "\u0000\u0000\u01bb\u01bc\u0003\u0012\t\u0000\u01bc\u01bd\u0003T*\u0000"+ - "\u01bdE\u0001\u0000\u0000\u0000\u01be\u01bf\u0005\u000b\u0000\u0000\u01bf"+ - "\u01c0\u0003*\u0015\u0000\u01c0G\u0001\u0000\u0000\u0000\u01c1\u01c6\u0003"+ - "J%\u0000\u01c2\u01c3\u0005\"\u0000\u0000\u01c3\u01c5\u0003J%\u0000\u01c4"+ - "\u01c2\u0001\u0000\u0000\u0000\u01c5\u01c8\u0001\u0000\u0000\u0000\u01c6"+ - "\u01c4\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7"+ - "I\u0001\u0000\u0000\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c9\u01ca"+ - "\u0003.\u0017\u0000\u01ca\u01cb\u0005!\u0000\u0000\u01cb\u01cc\u00032"+ - "\u0019\u0000\u01ccK\u0001\u0000\u0000\u0000\u01cd\u01ce\u0007\u0007\u0000"+ - "\u0000\u01ceM\u0001\u0000\u0000\u0000\u01cf\u01d2\u0003P(\u0000\u01d0"+ - "\u01d2\u0003R)\u0000\u01d1\u01cf\u0001\u0000\u0000\u0000\u01d1\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d2O\u0001\u0000\u0000\u0000\u01d3\u01d5\u0007\u0000"+ - 
"\u0000\u0000\u01d4\u01d3\u0001\u0000\u0000\u0000\u01d4\u01d5\u0001\u0000"+ - "\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005\u001d"+ - "\u0000\u0000\u01d7Q\u0001\u0000\u0000\u0000\u01d8\u01da\u0007\u0000\u0000"+ - "\u0000\u01d9\u01d8\u0001\u0000\u0000\u0000\u01d9\u01da\u0001\u0000\u0000"+ - "\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc\u0005\u001c\u0000"+ - "\u0000\u01dcS\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u001b\u0000\u0000"+ - "\u01deU\u0001\u0000\u0000\u0000\u01df\u01e0\u0007\b\u0000\u0000\u01e0"+ - "W\u0001\u0000\u0000\u0000\u01e1\u01e2\u0005\u0005\u0000\u0000\u01e2\u01e3"+ - "\u0003Z-\u0000\u01e3Y\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005?\u0000"+ - "\u0000\u01e5\u01e6\u0003\u0002\u0001\u0000\u01e6\u01e7\u0005@\u0000\u0000"+ - "\u01e7[\u0001\u0000\u0000\u0000\u01e8\u01e9\u0005\u000f\u0000\u0000\u01e9"+ - "\u01ed\u0005^\u0000\u0000\u01ea\u01eb\u0005\u000f\u0000\u0000\u01eb\u01ed"+ - "\u0005_\u0000\u0000\u01ec\u01e8\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001"+ - "\u0000\u0000\u0000\u01ed]\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u0003"+ - "\u0000\u0000\u01ef\u01f2\u0003(\u0014\u0000\u01f0\u01f1\u0005S\u0000\u0000"+ - "\u01f1\u01f3\u0003,\u0016\u0000\u01f2\u01f0\u0001\u0000\u0000\u0000\u01f2"+ - "\u01f3\u0001\u0000\u0000\u0000\u01f3\u01fd\u0001\u0000\u0000\u0000\u01f4"+ - "\u01f5\u0005T\u0000\u0000\u01f5\u01fa\u0003`0\u0000\u01f6\u01f7\u0005"+ - "\"\u0000\u0000\u01f7\u01f9\u0003`0\u0000\u01f8\u01f6\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fb\u01fe\u0001\u0000\u0000"+ - "\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01f4\u0001\u0000\u0000"+ - "\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe_\u0001\u0000\u0000\u0000"+ - "\u01ff\u0200\u0003,\u0016\u0000\u0200\u0201\u0005!\u0000\u0000\u0201\u0203"+ - "\u0001\u0000\u0000\u0000\u0202\u01ff\u0001\u0000\u0000\u0000\u0202\u0203"+ - 
"\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000\u0204\u0205"+ - "\u0003,\u0016\u0000\u0205a\u0001\u0000\u0000\u00004mt\u0083\u008f\u0098"+ - "\u00a0\u00a4\u00ac\u00ae\u00b3\u00ba\u00bf\u00c6\u00cc\u00d4\u00d6\u00e0"+ - "\u00ea\u00ed\u00f9\u0101\u0109\u010d\u0116\u0120\u0124\u012a\u0131\u013b"+ - "\u0143\u0159\u0164\u016f\u0174\u017f\u0184\u0188\u0190\u0199\u019c\u01a4"+ - "\u01ad\u01b8\u01c6\u01d1\u01d4\u01d9\u01ec\u01f2\u01fa\u01fd\u0202"; + "\u013e\u013f\u0007\u0003\u0000\u0000\u013f-\u0001\u0000\u0000\u0000\u0140"+ + "\u0141\u0007\u0004\u0000\u0000\u0141/\u0001\u0000\u0000\u0000\u0142\u016d"+ + "\u0005-\u0000\u0000\u0143\u0144\u0003P(\u0000\u0144\u0145\u0005B\u0000"+ + "\u0000\u0145\u016d\u0001\u0000\u0000\u0000\u0146\u016d\u0003N\'\u0000"+ + "\u0147\u016d\u0003P(\u0000\u0148\u016d\u0003J%\u0000\u0149\u016d\u0005"+ + "0\u0000\u0000\u014a\u016d\u0003R)\u0000\u014b\u014c\u0005@\u0000\u0000"+ + "\u014c\u0151\u0003L&\u0000\u014d\u014e\u0005\"\u0000\u0000\u014e\u0150"+ + "\u0003L&\u0000\u014f\u014d\u0001\u0000\u0000\u0000\u0150\u0153\u0001\u0000"+ + "\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000"+ + "\u0000\u0000\u0152\u0154\u0001\u0000\u0000\u0000\u0153\u0151\u0001\u0000"+ + "\u0000\u0000\u0154\u0155\u0005A\u0000\u0000\u0155\u016d\u0001\u0000\u0000"+ + "\u0000\u0156\u0157\u0005@\u0000\u0000\u0157\u015c\u0003J%\u0000\u0158"+ + "\u0159\u0005\"\u0000\u0000\u0159\u015b\u0003J%\u0000\u015a\u0158\u0001"+ + "\u0000\u0000\u0000\u015b\u015e\u0001\u0000\u0000\u0000\u015c\u015a\u0001"+ + "\u0000\u0000\u0000\u015c\u015d\u0001\u0000\u0000\u0000\u015d\u015f\u0001"+ + "\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f\u0160\u0005"+ + "A\u0000\u0000\u0160\u016d\u0001\u0000\u0000\u0000\u0161\u0162\u0005@\u0000"+ + "\u0000\u0162\u0167\u0003R)\u0000\u0163\u0164\u0005\"\u0000\u0000\u0164"+ + "\u0166\u0003R)\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0166\u0169\u0001"+ + 
"\u0000\u0000\u0000\u0167\u0165\u0001\u0000\u0000\u0000\u0167\u0168\u0001"+ + "\u0000\u0000\u0000\u0168\u016a\u0001\u0000\u0000\u0000\u0169\u0167\u0001"+ + "\u0000\u0000\u0000\u016a\u016b\u0005A\u0000\u0000\u016b\u016d\u0001\u0000"+ + "\u0000\u0000\u016c\u0142\u0001\u0000\u0000\u0000\u016c\u0143\u0001\u0000"+ + "\u0000\u0000\u016c\u0146\u0001\u0000\u0000\u0000\u016c\u0147\u0001\u0000"+ + "\u0000\u0000\u016c\u0148\u0001\u0000\u0000\u0000\u016c\u0149\u0001\u0000"+ + "\u0000\u0000\u016c\u014a\u0001\u0000\u0000\u0000\u016c\u014b\u0001\u0000"+ + "\u0000\u0000\u016c\u0156\u0001\u0000\u0000\u0000\u016c\u0161\u0001\u0000"+ + "\u0000\u0000\u016d1\u0001\u0000\u0000\u0000\u016e\u016f\u0005\n\u0000"+ + "\u0000\u016f\u0170\u0005\u001c\u0000\u0000\u01703\u0001\u0000\u0000\u0000"+ + "\u0171\u0172\u0005\u0010\u0000\u0000\u0172\u0177\u00036\u001b\u0000\u0173"+ + "\u0174\u0005\"\u0000\u0000\u0174\u0176\u00036\u001b\u0000\u0175\u0173"+ + "\u0001\u0000\u0000\u0000\u0176\u0179\u0001\u0000\u0000\u0000\u0177\u0175"+ + "\u0001\u0000\u0000\u0000\u0177\u0178\u0001\u0000\u0000\u0000\u01785\u0001"+ + "\u0000\u0000\u0000\u0179\u0177\u0001\u0000\u0000\u0000\u017a\u017c\u0003"+ + "\n\u0005\u0000\u017b\u017d\u0007\u0005\u0000\u0000\u017c\u017b\u0001\u0000"+ + "\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u0180\u0001\u0000"+ + "\u0000\u0000\u017e\u017f\u0005.\u0000\u0000\u017f\u0181\u0007\u0006\u0000"+ + "\u0000\u0180\u017e\u0001\u0000\u0000\u0000\u0180\u0181\u0001\u0000\u0000"+ + "\u0000\u01817\u0001\u0000\u0000\u0000\u0182\u0183\u0005\t\u0000\u0000"+ + "\u0183\u0188\u0003*\u0015\u0000\u0184\u0185\u0005\"\u0000\u0000\u0185"+ + "\u0187\u0003*\u0015\u0000\u0186\u0184\u0001\u0000\u0000\u0000\u0187\u018a"+ + "\u0001\u0000\u0000\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0188\u0189"+ + "\u0001\u0000\u0000\u0000\u0189\u0195\u0001\u0000\u0000\u0000\u018a\u0188"+ + "\u0001\u0000\u0000\u0000\u018b\u018c\u0005\f\u0000\u0000\u018c\u0191\u0003"+ + 
"*\u0015\u0000\u018d\u018e\u0005\"\u0000\u0000\u018e\u0190\u0003*\u0015"+ + "\u0000\u018f\u018d\u0001\u0000\u0000\u0000\u0190\u0193\u0001\u0000\u0000"+ + "\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0191\u0192\u0001\u0000\u0000"+ + "\u0000\u0192\u0195\u0001\u0000\u0000\u0000\u0193\u0191\u0001\u0000\u0000"+ + "\u0000\u0194\u0182\u0001\u0000\u0000\u0000\u0194\u018b\u0001\u0000\u0000"+ + "\u0000\u01959\u0001\u0000\u0000\u0000\u0196\u0197\u0005\u0002\u0000\u0000"+ + "\u0197\u019c\u0003*\u0015\u0000\u0198\u0199\u0005\"\u0000\u0000\u0199"+ + "\u019b\u0003*\u0015\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019b\u019e"+ + "\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000\u019c\u019d"+ + "\u0001\u0000\u0000\u0000\u019d;\u0001\u0000\u0000\u0000\u019e\u019c\u0001"+ + "\u0000\u0000\u0000\u019f\u01a0\u0005\r\u0000\u0000\u01a0\u01a5\u0003>"+ + "\u001f\u0000\u01a1\u01a2\u0005\"\u0000\u0000\u01a2\u01a4\u0003>\u001f"+ + "\u0000\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a7\u0001\u0000\u0000"+ + "\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000"+ + "\u0000\u01a6=\u0001\u0000\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000"+ + "\u01a8\u01a9\u0003*\u0015\u0000\u01a9\u01aa\u0005P\u0000\u0000\u01aa\u01ab"+ + "\u0003*\u0015\u0000\u01ab?\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005\u0001"+ + "\u0000\u0000\u01ad\u01ae\u0003\u0012\t\u0000\u01ae\u01b0\u0003R)\u0000"+ + "\u01af\u01b1\u0003F#\u0000\u01b0\u01af\u0001\u0000\u0000\u0000\u01b0\u01b1"+ + "\u0001\u0000\u0000\u0000\u01b1A\u0001\u0000\u0000\u0000\u01b2\u01b3\u0005"+ + "\u0007\u0000\u0000\u01b3\u01b4\u0003\u0012\t\u0000\u01b4\u01b5\u0003R"+ + ")\u0000\u01b5C\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\u000b\u0000\u0000"+ + "\u01b7\u01b8\u0003(\u0014\u0000\u01b8E\u0001\u0000\u0000\u0000\u01b9\u01be"+ + "\u0003H$\u0000\u01ba\u01bb\u0005\"\u0000\u0000\u01bb\u01bd\u0003H$\u0000"+ + "\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01c0\u0001\u0000\u0000\u0000"+ + 
"\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf\u0001\u0000\u0000\u0000"+ + "\u01bfG\u0001\u0000\u0000\u0000\u01c0\u01be\u0001\u0000\u0000\u0000\u01c1"+ + "\u01c2\u0003,\u0016\u0000\u01c2\u01c3\u0005!\u0000\u0000\u01c3\u01c4\u0003"+ + "0\u0018\u0000\u01c4I\u0001\u0000\u0000\u0000\u01c5\u01c6\u0007\u0007\u0000"+ + "\u0000\u01c6K\u0001\u0000\u0000\u0000\u01c7\u01ca\u0003N\'\u0000\u01c8"+ + "\u01ca\u0003P(\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01c9\u01c8\u0001"+ + "\u0000\u0000\u0000\u01caM\u0001\u0000\u0000\u0000\u01cb\u01cd\u0007\u0000"+ + "\u0000\u0000\u01cc\u01cb\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000"+ + "\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005\u001d"+ + "\u0000\u0000\u01cfO\u0001\u0000\u0000\u0000\u01d0\u01d2\u0007\u0000\u0000"+ + "\u0000\u01d1\u01d0\u0001\u0000\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000"+ + "\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u01d4\u0005\u001c\u0000"+ + "\u0000\u01d4Q\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005\u001b\u0000\u0000"+ + "\u01d6S\u0001\u0000\u0000\u0000\u01d7\u01d8\u0007\b\u0000\u0000\u01d8"+ + "U\u0001\u0000\u0000\u0000\u01d9\u01da\u0005\u0005\u0000\u0000\u01da\u01db"+ + "\u0003X,\u0000\u01dbW\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005@\u0000"+ + "\u0000\u01dd\u01de\u0003\u0002\u0001\u0000\u01de\u01df\u0005A\u0000\u0000"+ + "\u01dfY\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005\u000f\u0000\u0000\u01e1"+ + "\u01e5\u0005`\u0000\u0000\u01e2\u01e3\u0005\u000f\u0000\u0000\u01e3\u01e5"+ + "\u0005a\u0000\u0000\u01e4\u01e0\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001"+ + "\u0000\u0000\u0000\u01e5[\u0001\u0000\u0000\u0000\u01e6\u01ea\u0005\u0003"+ + "\u0000\u0000\u01e7\u01e9\u0003`0\u0000\u01e8\u01e7\u0001\u0000\u0000\u0000"+ + "\u01e9\u01ec\u0001\u0000\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000"+ + "\u01ea\u01eb\u0001\u0000\u0000\u0000\u01eb\u01ed\u0001\u0000\u0000\u0000"+ + "\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ed\u01f0\u0005V\u0000\u0000\u01ee"+ + 
"\u01ef\u0005T\u0000\u0000\u01ef\u01f1\u0003*\u0015\u0000\u01f0\u01ee\u0001"+ + "\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01fb\u0001"+ + "\u0000\u0000\u0000\u01f2\u01f3\u0005U\u0000\u0000\u01f3\u01f8\u0003^/"+ + "\u0000\u01f4\u01f5\u0005\"\u0000\u0000\u01f5\u01f7\u0003^/\u0000\u01f6"+ + "\u01f4\u0001\u0000\u0000\u0000\u01f7\u01fa\u0001\u0000\u0000\u0000\u01f8"+ + "\u01f6\u0001\u0000\u0000\u0000\u01f8\u01f9\u0001\u0000\u0000\u0000\u01f9"+ + "\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001\u0000\u0000\u0000\u01fb"+ + "\u01f2\u0001\u0000\u0000\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fc"+ + "]\u0001\u0000\u0000\u0000\u01fd\u01fe\u0003*\u0015\u0000\u01fe\u01ff\u0005"+ + "!\u0000\u0000\u01ff\u0201\u0001\u0000\u0000\u0000\u0200\u01fd\u0001\u0000"+ + "\u0000\u0000\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0003*\u0015\u0000\u0203_\u0001\u0000\u0000\u0000"+ + "\u0204\u0205\u0005@\u0000\u0000\u0205\u0206\u0005f\u0000\u0000\u0206\u0207"+ + "\u0005e\u0000\u0000\u0207\u0208\u0005f\u0000\u0000\u0208\u0209\u0005A"+ + "\u0000\u0000\u0209a\u0001\u0000\u0000\u00004mt\u0083\u008f\u0098\u00a0"+ + "\u00a4\u00ac\u00ae\u00b3\u00ba\u00bf\u00c6\u00cc\u00d4\u00d6\u00e0\u00ea"+ + "\u00ed\u00f9\u0101\u0109\u010d\u0116\u0120\u0124\u012a\u0133\u013b\u0151"+ + "\u015c\u0167\u016c\u0177\u017c\u0180\u0188\u0191\u0194\u019c\u01a5\u01b0"+ + "\u01be\u01c9\u01cc\u01d1\u01e4\u01ea\u01f0\u01f8\u01fb\u0200"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 73b529cd2be92..40946a2236d2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -384,18 +384,6 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

    The default implementation does nothing.

    */ @Override public void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { } - /** - * {@inheritDoc} - * - *

    The default implementation does nothing.

    - */ - @Override public void enterGrouping(EsqlBaseParser.GroupingContext ctx) { } - /** - * {@inheritDoc} - * - *

    The default implementation does nothing.

    - */ - @Override public void exitGrouping(EsqlBaseParser.GroupingContext ctx) { } /** * {@inheritDoc} * @@ -864,6 +852,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

    The default implementation does nothing.

    */ @Override public void exitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterSetting(EsqlBaseParser.SettingContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitSetting(EsqlBaseParser.SettingContext ctx) { } /** * {@inheritDoc} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index d35481745cecc..43c30c0a063cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -229,13 +229,6 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

    The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

    - */ - @Override public T visitGrouping(EsqlBaseParser.GroupingContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -509,4 +502,11 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitSetting(EsqlBaseParser.SettingContext ctx) { return visitChildren(ctx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 6c8cd7272d8dc..712227ab36787 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -351,16 +351,6 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); - /** - * Enter a parse tree produced by {@link EsqlBaseParser#grouping}. - * @param ctx the parse tree - */ - void enterGrouping(EsqlBaseParser.GroupingContext ctx); - /** - * Exit a parse tree produced by {@link EsqlBaseParser#grouping}. - * @param ctx the parse tree - */ - void exitGrouping(EsqlBaseParser.GroupingContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. * @param ctx the parse tree @@ -775,4 +765,14 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#setting}. + * @param ctx the parse tree + */ + void enterSetting(EsqlBaseParser.SettingContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#setting}. 
+ * @param ctx the parse tree + */ + void exitSetting(EsqlBaseParser.SettingContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 2fe5de566dbaf..d5c871641f3b7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -213,12 +213,6 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); - /** - * Visit a parse tree produced by {@link EsqlBaseParser#grouping}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitGrouping(EsqlBaseParser.GroupingContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. * @param ctx the parse tree @@ -465,4 +459,10 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#setting}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitSetting(EsqlBaseParser.SettingContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 9875979808f0b..d4cfb6b95176b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; @@ -33,6 +34,7 @@ import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; +import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.NamedExpression; @@ -58,12 +60,12 @@ import java.time.Duration; import java.time.ZoneId; import java.time.temporal.TemporalAmount; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.function.BiFunction; -import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.parseTemporalAmout; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.DATE_PERIOD; @@ -282,6 +284,7 @@ public Expression visitComparison(EsqlBaseParser.ComparisonContext ctx) { return switch (op.getSymbol().getType()) { case EsqlBaseParser.EQ -> new Equals(source, left, right, zoneId); + case EsqlBaseParser.CIEQ -> new InsensitiveEquals(source, left, right); case EsqlBaseParser.NEQ -> new Not(source, new Equals(source, left, right, zoneId)); case EsqlBaseParser.LT -> new LessThan(source, left, right, zoneId); case EsqlBaseParser.LTE -> new LessThanOrEqual(source, left, right, zoneId); @@ -409,13 +412,50 @@ private NamedExpression enrichFieldName(EsqlBaseParser.QualifiedNamePatternConte public Alias visitField(EsqlBaseParser.FieldContext ctx) { UnresolvedAttribute id = visitQualifiedName(ctx.qualifiedName()); Expression value = expression(ctx.booleanExpression()); - String name = id == null ? ctx.getText() : id.qualifiedName(); - return new Alias(source(ctx), name, value); + var source = source(ctx); + String name = id == null ? source.text() : id.qualifiedName(); + return new Alias(source, name, value); } @Override - public List visitGrouping(EsqlBaseParser.GroupingContext ctx) { - return ctx != null ? visitList(this, ctx.qualifiedName(), NamedExpression.class) : emptyList(); + public List visitFields(EsqlBaseParser.FieldsContext ctx) { + return ctx != null ? visitList(this, ctx.field(), Alias.class) : new ArrayList<>(); + } + + /** + * Similar to {@link #visitFields(EsqlBaseParser.FieldsContext)} however avoids wrapping the exception + * into an Alias. 
+ */ + public List visitGrouping(EsqlBaseParser.FieldsContext ctx) { + List list; + if (ctx != null) { + var fields = ctx.field(); + list = new ArrayList<>(fields.size()); + for (EsqlBaseParser.FieldContext field : fields) { + NamedExpression ne = null; + UnresolvedAttribute id = visitQualifiedName(field.qualifiedName()); + Expression value = expression(field.booleanExpression()); + String name = null; + if (id == null) { + // when no alias has been specified, see if the underling one can be reused + if (value instanceof Attribute a) { + ne = a; + } else { + name = source(field).text(); + } + } else { + name = id.qualifiedName(); + } + // wrap when necessary - no alias and no underlying attribute + if (ne == null) { + ne = new Alias(source(ctx), name, value); + } + list.add(ne); + } + } else { + list = new ArrayList<>(); + } + return list; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 2039dc633f6cf..7541326c172ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -25,7 +25,7 @@ public String visitIdentifier(IdentifierContext ctx) { @Override public String visitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { - return unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.PROJECT_UNQUOTED_IDENTIFIER()); + return unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.UNQUOTED_ID_PATTERN()); } @Override @@ -33,7 +33,7 @@ public String visitFromIdentifier(FromIdentifierContext ctx) { return ctx == null ? 
null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); } - static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { + protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { String result; if (quotedNode != null) { String identifier = quotedNode.getText(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index f9d1a252afe42..23a76afe41cff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -53,12 +53,15 @@ import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import static org.elasticsearch.common.logging.HeaderWarning.addWarning; +import static org.elasticsearch.xpack.esql.plan.logical.Enrich.Mode; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; import static org.elasticsearch.xpack.ql.parser.ParserUtils.typedParsing; import static org.elasticsearch.xpack.ql.parser.ParserUtils.visitList; @@ -195,17 +198,17 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { @Override public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { - List aggregates = new ArrayList<>(visitFields(ctx.fields())); - List groupings = visitGrouping(ctx.grouping()); + List aggregates = new ArrayList<>(visitFields(ctx.stats)); + List groupings = visitGrouping(ctx.grouping); if (aggregates.isEmpty() && groupings.isEmpty()) { throw new ParsingException(source(ctx), "At least one aggregation or grouping expression required in [{}]", 
ctx.getText()); } // grouping keys are automatically added as aggregations however the user is not allowed to specify them if (groupings.isEmpty() == false && aggregates.isEmpty() == false) { - var groupNames = Expressions.names(groupings); + var groupNames = new LinkedHashSet<>(Expressions.names(Expressions.references(groupings))); for (NamedExpression aggregate : aggregates) { - if (aggregate instanceof Alias a && a.child() instanceof UnresolvedAttribute ua && groupNames.contains(ua.name())) { + if (Alias.unwrap(aggregate) instanceof UnresolvedAttribute ua && groupNames.contains(ua.name())) { throw new ParsingException(ua.source(), "Cannot specify grouping expression [{}] as an aggregate", ua.name()); } } @@ -216,8 +219,8 @@ public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { @Override public PlanFactory visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { - List aggregates = new ArrayList<>(visitFields(ctx.fields())); - List groupings = visitGrouping(ctx.grouping()); + List aggregates = new ArrayList<>(visitFields(ctx.stats)); + List groupings = visitGrouping(ctx.grouping); aggregates.addAll(groupings); return input -> new InlineStats(source(ctx), input, new ArrayList<>(groupings), aggregates); } @@ -228,11 +231,6 @@ public PlanFactory visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { return input -> new Filter(source(ctx), input, expression); } - @Override - public List visitFields(EsqlBaseParser.FieldsContext ctx) { - return ctx != null ? 
visitList(this, ctx.field(), Alias.class) : new ArrayList<>(); - } - @Override public PlanFactory visitLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { Source source = source(ctx); @@ -311,28 +309,58 @@ public LogicalPlan visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { @Override public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { return p -> { - String policyName = visitFromIdentifier(ctx.policyName); + String policyName = ctx.policyName.getText(); var source = source(ctx); + Mode mode = enrichMode(ctx.setting()); + NamedExpression matchField = ctx.ON() != null ? visitQualifiedNamePattern(ctx.matchField) : new EmptyAttribute(source); if (matchField.name().contains("*")) { - throw new ParsingException( - source(ctx), - "Using wildcards (*) in ENRICH WITH projections is not allowed [{}]", - matchField.name() - ); + throw new ParsingException(source, "Using wildcards (*) in ENRICH WITH projections is not allowed [{}]", matchField.name()); } List keepClauses = visitList(this, ctx.enrichWithClause(), NamedExpression.class); return new Enrich( source, p, + mode, new Literal(source(ctx.policyName), policyName, DataTypes.KEYWORD), matchField, null, + Map.of(), keepClauses.isEmpty() ? 
List.of() : keepClauses ); }; } + private Mode enrichMode(List setting) { + if (setting == null || setting.isEmpty()) { + return null; + } + var s = setting.get(0); + var source = source(s); + if (setting.size() > 1) { + throw new ParsingException(source, "Only one setting allowed for now in ENRICH"); + } + String mode = "ccq.mode"; + + var nameText = s.name.getText(); + if (mode.equals(nameText.toLowerCase(Locale.ROOT)) == false) { + throw new ParsingException(source(s.name), "Unsupported setting [{}], expected [{}]", nameText, mode); + } + + var valueText = s.value.getText(); + Enrich.Mode m = Enrich.Mode.from(valueText); + if (m == null) { + throw new ParsingException( + source(s.value), + "Unrecognized value [{}], ENRICH [{}] needs to be one of {}", + valueText, + nameText, + Enrich.Mode.values() + ); + } + return m; + } + interface PlanFactory extends Function {} } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 1ad73be7902f7..d5db90aa07325 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.esql.plan.logical; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.ql.capabilities.Resolvables; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.EmptyAttribute; @@ -19,6 +20,8 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; @@ -26,22 +29,49 @@ public 
class Enrich extends UnaryPlan { private final Expression policyName; private final NamedExpression matchField; - private final EnrichPolicyResolution policy; - private List enrichFields; + private final EnrichPolicy policy; + private final Map concreteIndices; // cluster -> enrich indices + private final List enrichFields; private List output; + private final Mode mode; + + public enum Mode { + ANY, + COORDINATOR, + REMOTE; + + private static final Map map; + + static { + var values = Mode.values(); + map = Maps.newMapWithExpectedSize(values.length); + for (Mode m : values) { + map.put(m.name(), m); + } + } + + public static Mode from(String name) { + return name == null ? null : map.get(name.toUpperCase(Locale.ROOT)); + } + } + public Enrich( Source source, LogicalPlan child, + Mode mode, Expression policyName, NamedExpression matchField, - EnrichPolicyResolution policy, + EnrichPolicy policy, + Map concreteIndices, List enrichFields ) { super(source, child); + this.mode = mode == null ? 
Mode.ANY : mode; this.policyName = policyName; this.matchField = matchField; this.policy = policy; + this.concreteIndices = concreteIndices; this.enrichFields = enrichFields; } @@ -53,14 +83,22 @@ public List enrichFields() { return enrichFields; } - public EnrichPolicyResolution policy() { + public EnrichPolicy policy() { return policy; } + public Map concreteIndices() { + return concreteIndices; + } + public Expression policyName() { return policyName; } + public Mode mode() { + return mode; + } + @Override public boolean expressionsResolved() { return policyName.resolved() @@ -71,12 +109,12 @@ public boolean expressionsResolved() { @Override public UnaryPlan replaceChild(LogicalPlan newChild) { - return new Enrich(source(), newChild, policyName, matchField, policy, enrichFields); + return new Enrich(source(), newChild, mode, policyName, matchField, policy, concreteIndices, enrichFields); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Enrich::new, child(), policyName, matchField, policy, enrichFields); + return NodeInfo.create(this, Enrich::new, child(), mode, policyName, matchField, policy, concreteIndices, enrichFields); } @Override @@ -96,14 +134,16 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; Enrich enrich = (Enrich) o; - return Objects.equals(policyName, enrich.policyName) + return Objects.equals(mode, enrich.mode) + && Objects.equals(policyName, enrich.policyName) && Objects.equals(matchField, enrich.matchField) && Objects.equals(policy, enrich.policy) + && Objects.equals(concreteIndices, enrich.concreteIndices) && Objects.equals(enrichFields, enrich.enrichFields); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), policyName, matchField, policy, enrichFields); + return Objects.hash(super.hashCode(), mode, policyName, matchField, policy, concreteIndices, enrichFields); } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java index 6c36771ddd870..5a4b90c45f23d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java @@ -39,7 +39,7 @@ public ShowFunctions(Source source) { for (var name : List.of("name", "synopsis", "argNames", "argTypes", "argDescriptions", "returnType", "description")) { attributes.add(new ReferenceAttribute(Source.EMPTY, name, KEYWORD)); } - for (var name : List.of("optionalArgs", "variadic")) { + for (var name : List.of("optionalArgs", "variadic", "isAggregation")) { attributes.add(new ReferenceAttribute(Source.EMPTY, name, BOOLEAN)); } } @@ -63,6 +63,7 @@ public List> values(FunctionRegistry functionRegistry) { row.add(signature.description()); row.add(collect(signature, EsqlFunctionRegistry.ArgSignature::optional)); row.add(signature.variadic()); + row.add(signature.isAggregation()); rows.add(row); } rows.sort(Comparator.comparing(x -> ((BytesRef) x.get(0)))); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java index 6f2b83ef0aa6f..0bfaa2db2be5d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java @@ -6,23 +6,25 @@ */ package org.elasticsearch.xpack.esql.plan.physical; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.NamedExpression; -import org.elasticsearch.xpack.ql.index.EsIndex; import 
org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; public class EnrichExec extends UnaryExec implements EstimatesRowSize { + private final Enrich.Mode mode; private final NamedExpression matchField; private final String policyName; private final String policyMatchField; - private final EsIndex enrichIndex; + private final Map concreteIndices; // cluster -> enrich index private final List enrichFields; /** @@ -32,42 +34,58 @@ public class EnrichExec extends UnaryExec implements EstimatesRowSize { * @param matchField the match field in the source data * @param policyName the enrich policy name * @param policyMatchField the match field name in the policy - * @param enrichIndex the enricy policy index (the system index created by the policy execution, not the source index) + * @param concreteIndices a map from cluster to concrete enrich indices * @param enrichFields the enrich fields */ public EnrichExec( Source source, PhysicalPlan child, + Enrich.Mode mode, NamedExpression matchField, String policyName, String policyMatchField, - EsIndex enrichIndex, + Map concreteIndices, List enrichFields ) { super(source, child); + this.mode = mode; this.matchField = matchField; this.policyName = policyName; this.policyMatchField = policyMatchField; - this.enrichIndex = enrichIndex; + this.concreteIndices = concreteIndices; this.enrichFields = enrichFields; } @Override protected NodeInfo info() { - return NodeInfo.create(this, EnrichExec::new, child(), matchField, policyName, policyMatchField, enrichIndex, enrichFields); + return NodeInfo.create( + this, + EnrichExec::new, + child(), + mode, + matchField, + policyName, + policyMatchField, + concreteIndices, + enrichFields + ); } @Override public EnrichExec replaceChild(PhysicalPlan newChild) { - return new EnrichExec(source(), 
newChild, matchField, policyName, policyMatchField, enrichIndex, enrichFields); + return new EnrichExec(source(), newChild, mode, matchField, policyName, policyMatchField, concreteIndices, enrichFields); + } + + public Enrich.Mode mode() { + return mode; } public NamedExpression matchField() { return matchField; } - public EsIndex enrichIndex() { - return enrichIndex; + public Map concreteIndices() { + return concreteIndices; } public List enrichFields() { @@ -99,15 +117,16 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; EnrichExec that = (EnrichExec) o; - return Objects.equals(matchField, that.matchField) + return mode.equals(that.mode) + && Objects.equals(matchField, that.matchField) && Objects.equals(policyName, that.policyName) && Objects.equals(policyMatchField, that.policyMatchField) - && Objects.equals(enrichIndex, that.enrichIndex) + && Objects.equals(concreteIndices, that.concreteIndices) && Objects.equals(enrichFields, that.enrichFields); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), matchField, policyName, policyMatchField, enrichIndex, enrichFields); + return Objects.hash(super.hashCode(), mode, matchField, policyName, policyMatchField, concreteIndices, enrichFields); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FieldExtractExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FieldExtractExec.java index d252385acc89a..e6a0d352462c8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FieldExtractExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FieldExtractExec.java @@ -7,23 +7,32 @@ package org.elasticsearch.xpack.esql.plan.physical; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.xpack.ql.expression.Attribute; import 
org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.NodeUtils; import org.elasticsearch.xpack.ql.tree.Source; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Set; public class FieldExtractExec extends UnaryExec implements EstimatesRowSize { private final List attributesToExtract; private final Attribute sourceAttribute; + private final Set preferDocValues; public FieldExtractExec(Source source, PhysicalPlan child, List attributesToExtract) { + this(source, child, attributesToExtract, new HashSet<>()); + } + + public FieldExtractExec(Source source, PhysicalPlan child, List attributesToExtract, Set preferDocValues) { super(source, child); this.attributesToExtract = attributesToExtract; this.sourceAttribute = extractSourceAttributesFrom(child); + this.preferDocValues = preferDocValues; } public static Attribute extractSourceAttributesFrom(PhysicalPlan plan) { @@ -32,12 +41,12 @@ public static Attribute extractSourceAttributesFrom(PhysicalPlan plan) { @Override protected NodeInfo info() { - return NodeInfo.create(this, FieldExtractExec::new, child(), attributesToExtract); + return NodeInfo.create(this, FieldExtractExec::new, child(), attributesToExtract, preferDocValues); } @Override public UnaryExec replaceChild(PhysicalPlan newChild) { - return new FieldExtractExec(source(), newChild, attributesToExtract); + return new FieldExtractExec(source(), newChild, attributesToExtract, preferDocValues); } public List attributesToExtract() { @@ -63,7 +72,7 @@ public PhysicalPlan estimateRowSize(State state) { @Override public int hashCode() { - return Objects.hash(attributesToExtract, child()); + return Objects.hash(attributesToExtract, preferDocValues, child()); } @Override @@ -77,11 +86,28 @@ public boolean equals(Object obj) { } FieldExtractExec other = (FieldExtractExec) obj; - return Objects.equals(attributesToExtract, other.attributesToExtract) && Objects.equals(child(), 
other.child()); + return Objects.equals(attributesToExtract, other.attributesToExtract) + && Objects.equals(preferDocValues, other.preferDocValues) + && Objects.equals(child(), other.child()); } @Override public String nodeString() { - return nodeName() + NodeUtils.limitedToString(attributesToExtract); + return nodeName() + NodeUtils.limitedToString(attributesToExtract) + NodeUtils.limitedToString(preferDocValues); + } + + public FieldExtractExec preferDocValues(Attribute attr) { + Set newForStats = new HashSet<>(preferDocValues); + newForStats.add(attr); + return new FieldExtractExec(source(), child(), attributesToExtract, newForStats); + } + + /** + * Returns DOC_VALUES if the given attribute should be preferrentially extracted from doc-values. + */ + public MappedFieldType.FieldExtractPreference extractPreference(Attribute attr) { + return preferDocValues.contains(attr) + ? MappedFieldType.FieldExtractPreference.DOC_VALUES + : MappedFieldType.FieldExtractPreference.NONE; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 69e80a433f2d0..b4db1efa036b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -125,8 +125,8 @@ else if (mode == AggregateExec.Mode.PARTIAL) { if (mode == AggregateExec.Mode.FINAL) { for (var agg : aggregates) { - if (agg instanceof Alias alias && alias.child() instanceof AggregateFunction) { - layout.append(alias); + if (Alias.unwrap(agg) instanceof AggregateFunction) { + layout.append(agg); } } } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index da81800c09402..c375ef24da829 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.aggregate.NumericAggregate; import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.AttributeMap; @@ -30,6 +31,7 @@ import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.function.Function; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.ql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -44,9 +46,13 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; + public class AggregateMapper { static final List NUMERIC = List.of("Int", "Long", "Double"); + static final List SPATIAL = List.of("GeoPoint", "CartesianPoint"); /** List of all ESQL agg functions. 
*/ static final List> AGG_FUNCTIONS = List.of( @@ -57,11 +63,12 @@ public class AggregateMapper { MedianAbsoluteDeviation.class, Min.class, Percentile.class, + SpatialCentroid.class, Sum.class ); /** Record of agg Class, type, and grouping (or non-grouping). */ - record AggDef(Class aggClazz, String type, boolean grouping) {} + record AggDef(Class aggClazz, String type, String extra, boolean grouping) {} /** Map of AggDef types to intermediate named expressions. */ private final Map> mapper; @@ -103,7 +110,7 @@ public List mapGrouping(Expression aggregate) { } private Stream map(Expression aggregate, boolean grouping) { - aggregate = unwrapAlias(aggregate); + aggregate = Alias.unwrap(aggregate); return cache.computeIfAbsent(aggregate, aggKey -> computeEntryForAgg(aggKey, grouping)).stream(); } @@ -131,28 +138,40 @@ private List getNonNull(AggDef aggDef) { return l; } - static Stream, String>> typeAndNames(Class clazz) { + private static Stream, Tuple>> typeAndNames(Class clazz) { List types; + List extraConfigs = List.of(""); if (NumericAggregate.class.isAssignableFrom(clazz)) { types = NUMERIC; } else if (clazz == Count.class) { types = List.of(""); // no extra type distinction + } else if (SpatialAggregateFunction.class.isAssignableFrom(clazz)) { + types = SPATIAL; + extraConfigs = List.of("SourceValues", "DocValues"); } else { assert clazz == CountDistinct.class : "Expected CountDistinct, got: " + clazz; types = Stream.concat(NUMERIC.stream(), Stream.of("Boolean", "BytesRef")).toList(); } - return types.stream().map(type -> new Tuple<>(clazz, type)); + return combinations(types, extraConfigs).map(combo -> new Tuple<>(clazz, combo)); + } + + private static Stream> combinations(List types, List extraConfigs) { + return types.stream().flatMap(type -> extraConfigs.stream().map(config -> new Tuple<>(type, config))); } - static Stream groupingAndNonGrouping(Tuple, String> tuple) { - return Stream.of(new AggDef(tuple.v1(), tuple.v2(), true), new AggDef(tuple.v1(), 
tuple.v2(), false)); + private static Stream groupingAndNonGrouping(Tuple, Tuple> tuple) { + return Stream.of( + new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), true), + new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), false) + ); } - static AggDef aggDefOrNull(Expression aggregate, boolean grouping) { + private static AggDef aggDefOrNull(Expression aggregate, boolean grouping) { if (aggregate instanceof AggregateFunction aggregateFunction) { return new AggDef( aggregateFunction.getClass(), dataTypeToString(aggregateFunction.field().dataType(), aggregateFunction.getClass()), + aggregate instanceof SpatialCentroid ? "SourceValues" : "", grouping ); } @@ -160,9 +179,9 @@ static AggDef aggDefOrNull(Expression aggregate, boolean grouping) { } /** Retrieves the intermediate state description for a given class, type, and grouping. */ - static List lookupIntermediateState(AggDef aggDef) { + private static List lookupIntermediateState(AggDef aggDef) { try { - return (List) lookup(aggDef.aggClazz(), aggDef.type(), aggDef.grouping()).invokeExact(); + return (List) lookup(aggDef.aggClazz(), aggDef.type(), aggDef.extra(), aggDef.grouping()).invokeExact(); } catch (Throwable t) { // invokeExact forces us to handle any Throwable thrown by lookup. throw new EsqlIllegalArgumentException(t); @@ -170,11 +189,11 @@ static List lookupIntermediateState(AggDef aggDef) { } /** Looks up the intermediate state method for a given class, type, and grouping. 
*/ - static MethodHandle lookup(Class clazz, String type, boolean grouping) { + private static MethodHandle lookup(Class clazz, String type, String extra, boolean grouping) { try { return MethodHandles.lookup() .findStatic( - Class.forName(determineAggName(clazz, type, grouping)), + Class.forName(determineAggName(clazz, type, extra, grouping)), "intermediateStateDesc", MethodType.methodType(List.class) ); @@ -184,24 +203,34 @@ static MethodHandle lookup(Class clazz, String type, boolean grouping) { } /** Determines the engines agg class name, for the given class, type, and grouping. */ - static String determineAggName(Class clazz, String type, boolean grouping) { + private static String determineAggName(Class clazz, String type, String extra, boolean grouping) { StringBuilder sb = new StringBuilder(); - sb.append("org.elasticsearch.compute.aggregation."); + sb.append(determinePackageName(clazz)).append("."); sb.append(clazz.getSimpleName()); sb.append(type); + sb.append(extra); sb.append(grouping ? "Grouping" : ""); sb.append("AggregatorFunction"); return sb.toString(); } + /** Determines the engine agg package name, for the given class. */ + private static String determinePackageName(Class clazz) { + if (clazz.getSimpleName().startsWith("Spatial")) { + // All spatial aggs are in the spatial sub-package + return "org.elasticsearch.compute.aggregation.spatial"; + } + return "org.elasticsearch.compute.aggregation"; + } + /** Maps intermediate state description to named expressions. */ - static Stream isToNE(List intermediateStateDescs) { + private static Stream isToNE(List intermediateStateDescs) { return intermediateStateDescs.stream().map(is -> new ReferenceAttribute(Source.EMPTY, is.name(), toDataType(is.type()))); } /** Returns the data type for the engines element type. 
*/ // defaults to aggstate, but we'll eventually be able to remove this - static DataType toDataType(ElementType elementType) { + private static DataType toDataType(ElementType elementType) { return switch (elementType) { case BOOLEAN -> DataTypes.BOOLEAN; case BYTES_REF -> DataTypes.KEYWORD; @@ -213,7 +242,7 @@ static DataType toDataType(ElementType elementType) { } /** Returns the string representation for the data type. This reflects the engine's aggs naming structure. */ - static String dataTypeToString(DataType type, Class aggClass) { + private static String dataTypeToString(DataType type, Class aggClass) { if (aggClass == Count.class) { return ""; // no type distinction } @@ -227,12 +256,16 @@ static String dataTypeToString(DataType type, Class aggClass) { return "Double"; } else if (type.equals(DataTypes.KEYWORD) || type.equals(DataTypes.IP) || type.equals(DataTypes.TEXT)) { return "BytesRef"; + } else if (type.equals(GEO_POINT)) { + return "GeoPoint"; + } else if (type.equals(CARTESIAN_POINT)) { + return "CartesianPoint"; } else { throw new EsqlIllegalArgumentException("illegal agg type: " + type.typeName()); } } - static Expression unwrapAlias(Expression expression) { + private static Expression unwrapAlias(Expression expression) { if (expression instanceof Alias alias) return alias.child(); return expression; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index b324cf7c4056a..8a1c82baf45cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -9,24 +9,32 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.Query; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.lucene.BlockReaderFactories; +import org.elasticsearch.compute.lucene.LuceneCountOperator; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; +import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.search.NestedHelper; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -37,36 +45,53 @@ import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.Expression; import 
org.elasticsearch.xpack.ql.type.DataType; +import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; +import java.util.Set; import java.util.function.Function; import java.util.function.IntFunction; import static org.elasticsearch.common.lucene.search.Queries.newNonNestedFilter; import static org.elasticsearch.compute.lucene.LuceneSourceOperator.NO_LIMIT; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.NONE; public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProviders { + /** + * Context of each shard we're operating against. + */ + public interface ShardContext extends org.elasticsearch.compute.lucene.ShardContext { + /** + * Build something to load source {@code _source}. + */ + SourceLoader newSourceLoader(); - private final List searchContexts; + /** + * Convert a {@link QueryBuilder} into a real {@link Query lucene query}. + */ + Query toQuery(QueryBuilder queryBuilder); - public EsPhysicalOperationProviders(List searchContexts) { - this.searchContexts = searchContexts; + /** + * Returns something to load values from this field into a {@link Block}. 
+ */ + BlockLoader blockLoader(String name, boolean asUnsupportedSource, MappedFieldType.FieldExtractPreference fieldExtractPreference); } - public List searchContexts() { - return searchContexts; + private final List shardContexts; + + public EsPhysicalOperationProviders(List shardContexts) { + this.shardContexts = shardContexts; } @Override public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fieldExtractExec, PhysicalOperation source) { - // TODO: see if we can get the FieldExtractExec to know if spatial types need to be read from source or doc values, and capture - // that information in the BlockReaderFactories.loaders method so it is passed in the BlockLoaderContext - // to GeoPointFieldMapper.blockLoader Layout.Builder layout = source.layout.builder(); var sourceAttr = fieldExtractExec.sourceAttribute(); - List readers = searchContexts.stream() + List readers = shardContexts.stream() .map(s -> new ValuesSourceReaderOperator.ShardContext(s.searcher().getIndexReader(), s::newSourceLoader)) .toList(); List fields = new ArrayList<>(); @@ -74,49 +99,23 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi for (Attribute attr : fieldExtractExec.attributesToExtract()) { layout.append(attr); DataType dataType = attr.dataType(); - ElementType elementType = PlannerUtils.toElementType(dataType); + MappedFieldType.FieldExtractPreference fieldExtractPreference = fieldExtractExec.extractPreference(attr); + ElementType elementType = PlannerUtils.toElementType(dataType, fieldExtractPreference); String fieldName = attr.name(); boolean isSupported = EsqlDataTypes.isUnsupported(dataType); - IntFunction loader = s -> BlockReaderFactories.loader( - searchContexts.get(s).getSearchExecutionContext(), - fieldName, - isSupported - ); + IntFunction loader = s -> shardContexts.get(s).blockLoader(fieldName, isSupported, fieldExtractPreference); fields.add(new ValuesSourceReaderOperator.FieldInfo(fieldName, elementType, loader)); } 
return source.with(new ValuesSourceReaderOperator.Factory(fields, readers, docChannel), layout.build()); } - public static Function querySupplier(QueryBuilder queryBuilder) { - final QueryBuilder qb = queryBuilder == null ? QueryBuilders.matchAllQuery() : queryBuilder; - - return searchContext -> { - SearchExecutionContext ctx = searchContext.getSearchExecutionContext(); - Query query = ctx.toQuery(qb).query(); - NestedLookup nestedLookup = ctx.nestedLookup(); - if (nestedLookup != NestedLookup.EMPTY) { - NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped); - if (nestedHelper.mightMatchNestedDocs(query)) { - // filter out nested documents - query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) - .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER) - .build(); - } - } - AliasFilter aliasFilter = searchContext.request().getAliasFilter(); - if (aliasFilter != AliasFilter.EMPTY) { - Query filterQuery = ctx.toQuery(aliasFilter.getQueryBuilder()).query(); - query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) - .add(filterQuery, BooleanClause.Occur.FILTER) - .build(); - } - return query; - }; + public Function querySupplier(QueryBuilder builder) { + QueryBuilder qb = builder == null ? 
QueryBuilders.matchAllQuery() : builder; + return ctx -> shardContexts.get(ctx.index()).toQuery(qb); } @Override public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) { - Function querySupplier = querySupplier(esQueryExec.query()); final LuceneOperator.Factory luceneFactory; List sorts = esQueryExec.sorts(); @@ -130,8 +129,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, fieldSorts.add(sort.fieldSortBuilder()); } luceneFactory = new LuceneTopNSourceOperator.Factory( - searchContexts, - querySupplier, + shardContexts, + querySupplier(esQueryExec.query()), context.queryPragmas().dataPartitioning(), context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), @@ -140,8 +139,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, ); } else { luceneFactory = new LuceneSourceOperator.Factory( - searchContexts, - querySupplier, + shardContexts, + querySupplier(esQueryExec.query()), context.queryPragmas().dataPartitioning(), context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), @@ -155,6 +154,19 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, return PhysicalOperation.fromSource(luceneFactory, layout.build()); } + /** + * Build a {@link SourceOperator.SourceOperatorFactory} that counts documents in the search index. + */ + public LuceneCountOperator.Factory countSource(LocalExecutionPlannerContext context, QueryBuilder queryBuilder, Expression limit) { + return new LuceneCountOperator.Factory( + shardContexts, + querySupplier(queryBuilder), + context.queryPragmas().dataPartitioning(), + context.queryPragmas().taskConcurrency(), + limit == null ? 
NO_LIMIT : (Integer) limit.fold() + ); + } + @Override public final Operator.OperatorFactory ordinalGroupingOperatorFactory( LocalExecutionPlanner.PhysicalOperation source, @@ -166,19 +178,15 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory( ) { var sourceAttribute = FieldExtractExec.extractSourceAttributesFrom(aggregateExec.child()); int docChannel = source.layout.get(sourceAttribute.id()).channel(); - List shardContexts = searchContexts.stream() + List vsShardContexts = shardContexts.stream() .map(s -> new ValuesSourceReaderOperator.ShardContext(s.searcher().getIndexReader(), s::newSourceLoader)) .toList(); // The grouping-by values are ready, let's group on them directly. // Costin: why are they ready and not already exposed in the layout? boolean isUnsupported = EsqlDataTypes.isUnsupported(attrSource.dataType()); return new OrdinalsGroupingOperator.OrdinalsGroupingOperatorFactory( - shardIdx -> BlockReaderFactories.loader( - searchContexts.get(shardIdx).getSearchExecutionContext(), - attrSource.name(), - isUnsupported - ), - shardContexts, + shardIdx -> shardContexts.get(shardIdx).blockLoader(attrSource.name(), isUnsupported, NONE), + vsShardContexts, groupElementType, docChannel, attrSource.name(), @@ -186,4 +194,116 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory( context.pageSize(aggregateExec.estimatedRowSize()) ); } + + public static class DefaultShardContext implements ShardContext { + private final int index; + private final SearchExecutionContext ctx; + private final AliasFilter aliasFilter; + + public DefaultShardContext(int index, SearchExecutionContext ctx, AliasFilter aliasFilter) { + this.index = index; + this.ctx = ctx; + this.aliasFilter = aliasFilter; + } + + @Override + public int index() { + return index; + } + + @Override + public IndexSearcher searcher() { + return ctx.searcher(); + } + + @Override + public Optional buildSort(List> sorts) throws IOException { + return 
SortBuilder.buildSort(sorts, ctx); + } + + @Override + public String shardIdentifier() { + return ctx.getFullyQualifiedIndex().getName() + ":" + ctx.getShardId(); + } + + @Override + public SourceLoader newSourceLoader() { + return ctx.newSourceLoader(false); + } + + @Override + public Query toQuery(QueryBuilder queryBuilder) { + Query query = ctx.toQuery(queryBuilder).query(); + NestedLookup nestedLookup = ctx.nestedLookup(); + if (nestedLookup != NestedLookup.EMPTY) { + NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped); + if (nestedHelper.mightMatchNestedDocs(query)) { + // filter out nested documents + query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) + .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER) + .build(); + } + } + if (aliasFilter != AliasFilter.EMPTY) { + Query filterQuery = ctx.toQuery(aliasFilter.getQueryBuilder()).query(); + query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) + .add(filterQuery, BooleanClause.Occur.FILTER) + .build(); + } + return query; + } + + @Override + public BlockLoader blockLoader( + String name, + boolean asUnsupportedSource, + MappedFieldType.FieldExtractPreference fieldExtractPreference + ) { + if (asUnsupportedSource) { + return BlockLoader.CONSTANT_NULLS; + } + MappedFieldType fieldType = ctx.getFieldType(name); + if (fieldType == null) { + // the field does not exist in this context + return BlockLoader.CONSTANT_NULLS; + } + BlockLoader loader = fieldType.blockLoader(new MappedFieldType.BlockLoaderContext() { + @Override + public String indexName() { + return ctx.getFullyQualifiedIndex().getName(); + } + + @Override + public MappedFieldType.FieldExtractPreference fieldExtractPreference() { + return fieldExtractPreference; + } + + @Override + public SearchLookup lookup() { + return ctx.lookup(); + } + + @Override + public Set sourcePaths(String name) { + return ctx.sourcePath(name); + } + + @Override + public String 
parentField(String field) { + return ctx.parentPath(field); + } + + @Override + public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { + return (FieldNamesFieldMapper.FieldNamesFieldType) ctx.lookup().fieldType(FieldNamesFieldMapper.NAME); + } + }); + if (loader == null) { + HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", name); + return BlockLoader.CONSTANT_NULLS; + } + + return loader; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java index 98ac1a2d9910a..98b1037c704f6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java @@ -7,27 +7,63 @@ package org.elasticsearch.xpack.esql.planner; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.MetadataAttribute; +import org.elasticsearch.xpack.ql.expression.TypedAttribute; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNull; import 
org.elasticsearch.xpack.ql.planner.ExpressionTranslator; import org.elasticsearch.xpack.ql.planner.ExpressionTranslators; import org.elasticsearch.xpack.ql.planner.QlTranslatorHandler; +import org.elasticsearch.xpack.ql.planner.TranslatorHandler; import org.elasticsearch.xpack.ql.querydsl.query.Query; +import org.elasticsearch.xpack.ql.querydsl.query.TermQuery; +import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.Check; +import java.util.List; import java.util.function.Supplier; public final class EsqlTranslatorHandler extends QlTranslatorHandler { + + public static final List> QUERY_TRANSLATORS = List.of( + new EqualsIgnoreCaseTranslator(), + new ExpressionTranslators.BinaryComparisons(), + new ExpressionTranslators.Ranges(), + new ExpressionTranslators.BinaryLogic(), + new ExpressionTranslators.IsNulls(), + new ExpressionTranslators.IsNotNulls(), + new ExpressionTranslators.Nots(), + new ExpressionTranslators.Likes(), + new ExpressionTranslators.InComparisons(), + new ExpressionTranslators.StringQueries(), + new ExpressionTranslators.Matches(), + new ExpressionTranslators.MultiMatches(), + new ExpressionTranslators.Scalars() + ); + @Override public Query asQuery(Expression e) { - return ExpressionTranslators.toQuery(e, this); + Query translation = null; + for (ExpressionTranslator translator : QUERY_TRANSLATORS) { + translation = translator.translate(e, this); + if (translation != null) { + return translation; + } + } + + throw new QlIllegalArgumentException("Don't know how to translate {} {}", e.nodeName(), e); } @Override @@ -56,4 +92,36 @@ public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier { + + @Override + protected Query asQuery(InsensitiveEquals bc, TranslatorHandler handler) { + return doTranslate(bc, handler); + } + + public static Query doTranslate(InsensitiveEquals bc, TranslatorHandler handler) { + checkInsensitiveComparison(bc); + return 
handler.wrapFunctionQuery(bc, bc.left(), () -> translate(bc)); + } + + public static void checkInsensitiveComparison(InsensitiveEquals bc) { + Check.isTrue( + bc.right().foldable(), + "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [{}]", + bc.right().sourceLocation().getLineNumber(), + bc.right().sourceLocation().getColumnNumber(), + Expressions.name(bc.right()), + bc.symbol() + ); + } + + static Query translate(InsensitiveEquals bc) { + TypedAttribute attribute = checkIsPushableAttribute(bc.left()); + Source source = bc.source(); + BytesRef value = BytesRefs.toBytesRef(ExpressionTranslators.valueOf(bc.right())); + String name = pushableAttributeName(attribute); + return new TermQuery(source, name, value.utf8ToString(), true); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 15aec4545e7e7..8f4dd902a44e4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -7,17 +7,14 @@ package org.elasticsearch.xpack.esql.planner; -import org.apache.lucene.search.Query; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.LuceneCountOperator; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.operator.ColumnExtractOperator; import 
org.elasticsearch.compute.operator.Driver; @@ -49,7 +46,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; @@ -96,13 +92,11 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; -import java.util.Set; import java.util.function.Function; import java.util.stream.Stream; import static java.util.Arrays.asList; import static java.util.stream.Collectors.joining; -import static org.elasticsearch.compute.lucene.LuceneOperator.NO_LIMIT; import static org.elasticsearch.compute.operator.LimitOperator.Factory; import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory; @@ -114,6 +108,7 @@ public class LocalExecutionPlanner { private static final Logger logger = LogManager.getLogger(LocalExecutionPlanner.class); private final String sessionId; + private final String clusterAlias; private final CancellableTask parentTask; private final BigArrays bigArrays; private final BlockFactory blockFactory; @@ -126,6 +121,7 @@ public class LocalExecutionPlanner { public LocalExecutionPlanner( String sessionId, + String clusterAlias, CancellableTask parentTask, BigArrays bigArrays, BlockFactory blockFactory, @@ -137,6 +133,7 @@ public LocalExecutionPlanner( PhysicalOperationProviders physicalOperationProviders ) { this.sessionId = sessionId; + this.clusterAlias = clusterAlias; this.parentTask = parentTask; this.bigArrays = bigArrays; this.blockFactory = blockFactory; @@ -253,17 +250,7 @@ private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutio EsStatsQueryExec.Stat stat = statsQuery.stats().get(0); EsPhysicalOperationProviders esProvider = (EsPhysicalOperationProviders) physicalOperationProviders; - 
Function querySupplier = EsPhysicalOperationProviders.querySupplier(stat.filter(statsQuery.query())); - - Expression limitExp = statsQuery.limit(); - int limit = limitExp != null ? (Integer) limitExp.fold() : NO_LIMIT; - final LuceneOperator.Factory luceneFactory = new LuceneCountOperator.Factory( - esProvider.searchContexts(), - querySupplier, - context.queryPragmas.dataPartitioning(), - context.queryPragmas.taskConcurrency(), - limit - ); + final LuceneOperator.Factory luceneFactory = esProvider.countSource(context, stat.filter(statsQuery.query()), statsQuery.limit()); Layout.Builder layout = new Layout.Builder(); layout.append(statsQuery.outputSet()); @@ -357,7 +344,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case "version" -> TopNEncoder.VERSION; case "boolean", "null", "byte", "short", "integer", "long", "double", "float", "half_float", "datetime", "date_period", "time_duration", "object", "nested", "scaled_float", "unsigned_long", "_doc" -> TopNEncoder.DEFAULT_SORTABLE; - case "geo_point", "cartesian_point" -> TopNEncoder.DEFAULT_UNSORTABLE; + case "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point case "unsupported" -> TopNEncoder.UNSUPPORTED; default -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); @@ -470,11 +457,10 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon Layout.Builder layoutBuilder = source.layout.builder(); layoutBuilder.append(enrich.enrichFields()); Layout layout = layoutBuilder.build(); - Set indices = enrich.enrichIndex().concreteIndices(); - if (indices.size() != 1) { - throw new EsqlIllegalArgumentException("Resolved enrich should have one concrete index; got " + indices); + String enrichIndex = enrich.concreteIndices().get(clusterAlias); + if 
(enrichIndex == null) { + throw new EsqlIllegalArgumentException("No concrete enrich index for cluster [" + clusterAlias + "]"); } - String enrichIndex = Iterables.get(indices, 0); return source.with( new EnrichLookupOperator.Factory( sessionId, @@ -526,7 +512,7 @@ private PhysicalOperation planProject(ProjectExec project, LocalExecutionPlanner for (int index = 0, size = projections.size(); index < size; index++) { NamedExpression ne = projections.get(index); - NameId inputId; + NameId inputId = null; if (ne instanceof Alias a) { inputId = ((NamedExpression) a.child()).id(); } else { @@ -642,6 +628,11 @@ public String describe() { Stream.of(sinkOperatorFactory) ).map(Describable::describe).collect(joining("\n\\_", "\\_", "")); } + + @Override + public String toString() { + return describe(); + } } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 3eea84b0bd1f9..9410e9e97d078 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.planner; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -142,10 +143,11 @@ private PhysicalPlan map(UnaryPlan p, PhysicalPlan child) { return new EnrichExec( enrich.source(), child, + enrich.mode(), enrich.matchField(), - enrich.policy().policyName(), - enrich.policy().policy().getMatchField(), - enrich.policy().index().get(), + BytesRefs.toString(enrich.policyName().fold()), + enrich.policy().getMatchField(), + enrich.concreteIndices(), enrich.enrichFields() ); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 1c20e55f289c3..316c12eba6cbb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -52,6 +53,7 @@ import java.util.function.Predicate; import static java.util.Arrays.asList; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; import static org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer.PushFiltersToSource.canPushToSource; import static org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer.TRANSLATOR_HANDLER; import static org.elasticsearch.xpack.ql.util.Queries.Clause.FILTER; @@ -72,14 +74,14 @@ public static Tuple breakPlanBetweenCoordinatorAndDa return new Tuple<>(coordinatorPlan, dataNodePlan.get()); } - public static boolean hasEnrich(PhysicalPlan plan) { + public static boolean hasUnsupportedEnrich(PhysicalPlan plan) { boolean[] found = { false }; plan.forEachDown(p -> { - if (p instanceof EnrichExec) { + if (p instanceof EnrichExec enrich && enrich.mode() != Enrich.Mode.ANY) { found[0] = true; } if (p instanceof FragmentExec f) { - f.fragment().forEachDown(Enrich.class, e -> found[0] = true); + f.fragment().forEachDown(Enrich.class, e -> found[0] |= e.mode() != Enrich.Mode.ANY); } }); return found[0]; @@ -197,11 +199,10 @@ static QueryBuilder detectFilter(PhysicalPlan plan, String 
fieldName, Predicate< /** * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only. - * This specifically excludes GEO_POINT and CARTESIAN_POINT, which are backed by DataType.LONG - * but are not themselves sortable (the long can be sorted, but the sort order is not usually useful). + * This specifically excludes spatial data types, which are not themselves sortable. */ public static ElementType toSortableElementType(DataType dataType) { - if (dataType == EsqlDataTypes.GEO_POINT || dataType == EsqlDataTypes.CARTESIAN_POINT) { + if (EsqlDataTypes.isSpatial(dataType)) { return ElementType.UNKNOWN; } return toElementType(dataType); @@ -211,6 +212,15 @@ public static ElementType toSortableElementType(DataType dataType) { * Map QL's {@link DataType} to the compute engine's {@link ElementType}. */ public static ElementType toElementType(DataType dataType) { + return toElementType(dataType, MappedFieldType.FieldExtractPreference.NONE); + } + + /** + * Map QL's {@link DataType} to the compute engine's {@link ElementType}. + * Under some situations, the same data type might be extracted into a different element type. + * For example, spatial types can be extracted into doc-values under specific conditions, otherwise they extract as BytesRef. + */ + public static ElementType toElementType(DataType dataType, MappedFieldType.FieldExtractPreference fieldExtractPreference) { if (dataType == DataTypes.LONG || dataType == DataTypes.DATETIME || dataType == DataTypes.UNSIGNED_LONG) { return ElementType.LONG; } @@ -238,11 +248,11 @@ public static ElementType toElementType(DataType dataType) { if (dataType == EsQueryExec.DOC_DATA_TYPE) { return ElementType.DOC; } - // TODO: Spatial types can be read from source into BYTES_REF, or read from doc-values into LONG - if (dataType == EsqlDataTypes.GEO_POINT) { - return ElementType.BYTES_REF; + if (EsqlDataTypes.isSpatialPoint(dataType)) { + return fieldExtractPreference == DOC_VALUES ? 
ElementType.LONG : ElementType.BYTES_REF; } - if (dataType == EsqlDataTypes.CARTESIAN_POINT) { + if (EsqlDataTypes.isSpatial(dataType)) { + // TODO: support forStats for shape aggregations, like st_centroid return ElementType.BYTES_REF; } throw EsqlIllegalArgumentException.illegalDataType(dataType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index aa1eafbf90265..ef9bd6a9103af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardsGroup; @@ -22,7 +23,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; @@ -155,7 +155,14 @@ public void execute( .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planConcreteIndices(physicalPlan).toArray(String[]::new)); QueryPragmas queryPragmas = configuration.pragmas(); if (dataNodePlan == null || clusterToConcreteIndices.values().stream().allMatch(v -> v.indices().length == 0)) { - var computeContext = new ComputeContext(sessionId, List.of(), configuration, null, null); + var computeContext = new ComputeContext( + sessionId, + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + List.of(), + 
configuration, + null, + null + ); runCompute( rootTask, computeContext, @@ -168,8 +175,8 @@ public void execute( .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planOriginalIndices(physicalPlan)); var localOriginalIndices = clusterToOriginalIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); var localConcreteIndices = clusterToConcreteIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); - if (clusterToOriginalIndices.isEmpty() == false && PlannerUtils.hasEnrich(physicalPlan)) { - listener.onFailure(new IllegalArgumentException("cross clusters query doesn't support enrich yet")); + if (PlannerUtils.hasUnsupportedEnrich(physicalPlan)) { + listener.onFailure(new IllegalArgumentException("Enrich modes COORDINATOR and REMOTE are not supported yet")); return; } final var responseHeadersCollector = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); @@ -187,7 +194,7 @@ public void execute( // run compute on the coordinator runCompute( rootTask, - new ComputeContext(sessionId, List.of(), configuration, exchangeSource, null), + new ComputeContext(sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), coordinatorPlan, cancelOnFailure(rootTask, cancelled, refs.acquire()).map(driverProfiles -> { responseHeadersCollector.collect(); @@ -363,10 +370,22 @@ private ActionListener cancelOnFailure(CancellableTask task, AtomicBoolean void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener> listener) { listener = ActionListener.runAfter(listener, () -> Releasables.close(context.searchContexts)); + List contexts = new ArrayList<>(context.searchContexts.size()); + for (int i = 0; i < context.searchContexts.size(); i++) { + SearchContext searchContext = context.searchContexts.get(i); + contexts.add( + new EsPhysicalOperationProviders.DefaultShardContext( + i, + searchContext.getSearchExecutionContext(), + 
searchContext.request().getAliasFilter() + ) + ); + } final List drivers; try { LocalExecutionPlanner planner = new LocalExecutionPlanner( context.sessionId, + context.clusterAlias, task, bigArrays, blockFactory, @@ -375,7 +394,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, context.exchangeSource(), context.exchangeSink(), enrichLookupService, - new EsPhysicalOperationProviders(context.searchContexts) + new EsPhysicalOperationProviders(contexts) ); LOGGER.debug("Received physical plan:\n{}", plan); @@ -416,51 +435,60 @@ private void acquireSearchContexts( Map aliasFilters, ActionListener> listener ) { + final List targetShards = new ArrayList<>(); try { - List targetShards = new ArrayList<>(); for (ShardId shardId : shardIds) { var indexShard = searchService.getIndicesService().indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); targetShards.add(indexShard); } - if (targetShards.isEmpty()) { - listener.onResponse(List.of()); - return; + } catch (Exception e) { + listener.onFailure(e); + return; + } + final var doAcquire = ActionRunnable.supply(listener, () -> { + final List searchContexts = new ArrayList<>(targetShards.size()); + boolean success = false; + try { + for (IndexShard shard : targetShards) { + var aliasFilter = aliasFilters.getOrDefault(shard.shardId().getIndex(), AliasFilter.EMPTY); + var shardRequest = new ShardSearchRequest( + shard.shardId(), + configuration.absoluteStartedTimeInMillis(), + aliasFilter, + clusterAlias + ); + SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); + searchContexts.add(context); + } + for (SearchContext searchContext : searchContexts) { + searchContext.preProcess(); + } + success = true; + return searchContexts; + } finally { + if (success == false) { + IOUtils.close(searchContexts); + } } - CountDown countDown = new CountDown(targetShards.size()); + }); + final AtomicBoolean waitedForRefreshes = new AtomicBoolean(); + try 
(RefCountingRunnable refs = new RefCountingRunnable(() -> { + if (waitedForRefreshes.get()) { + esqlExecutor.execute(doAcquire); + } else { + doAcquire.run(); + } + })) { for (IndexShard targetShard : targetShards) { - targetShard.ensureShardSearchActive(ignored -> { - if (countDown.countDown()) { - ActionListener.completeWith(listener, () -> { - final List searchContexts = new ArrayList<>(targetShards.size()); - boolean success = false; - try { - for (IndexShard shard : targetShards) { - var aliasFilter = aliasFilters.getOrDefault(shard.shardId().getIndex(), AliasFilter.EMPTY); - var shardRequest = new ShardSearchRequest( - shard.shardId(), - configuration.absoluteStartedTimeInMillis(), - aliasFilter, - clusterAlias - ); - SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); - searchContexts.add(context); - } - for (SearchContext searchContext : searchContexts) { - searchContext.preProcess(); - } - success = true; - return searchContexts; - } finally { - if (success == false) { - IOUtils.close(searchContexts); - } - } - }); + final Releasable ref = refs.acquire(); + targetShard.ensureShardSearchActive(await -> { + try (ref) { + if (await) { + waitedForRefreshes.set(true); + } } }); } - } catch (Exception e) { - listener.onFailure(e); } } @@ -559,13 +587,15 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T ); final ActionListener listener = new ChannelActionListener<>(channel); final EsqlConfiguration configuration = request.configuration(); + String clusterAlias = request.clusterAlias(); acquireSearchContexts( - request.clusterAlias(), + clusterAlias, request.shardIds(), configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { - var computeContext = new ComputeContext(sessionId, searchContexts, configuration, null, exchangeSink); + assert ThreadPool.assertCurrentThreadPool(ESQL_THREAD_POOL_NAME); + var computeContext = new ComputeContext(sessionId, clusterAlias, 
searchContexts, configuration, null, exchangeSink); runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { // don't return until all pages are fetched exchangeSink.addCompletionListener( @@ -658,7 +688,7 @@ void runComputeOnRemoteCluster( ); runCompute( parentTask, - new ComputeContext(localSessionId, List.of(), configuration, exchangeSource, exchangeSink), + new ComputeContext(localSessionId, clusterAlias, List.of(), configuration, exchangeSource, exchangeSink), coordinatorPlan, cancelOnFailure(parentTask, cancelled, refs.acquire()).map(driverProfiles -> { responseHeadersCollector.collect(); @@ -691,6 +721,7 @@ void runComputeOnRemoteCluster( record ComputeContext( String sessionId, + String clusterAlias, List searchContexts, EsqlConfiguration configuration, ExchangeSourceHandler exchangeSource, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 07ca55aa665eb..aba1f5cfd6b40 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -138,6 +138,7 @@ public List> getSettings() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlStatsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlStatsAction.java index 0c23f5f05af6f..98a9a4a9e8a5e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlStatsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlStatsAction.java @@ -8,7 +8,6 @@ package 
org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public class EsqlStatsAction extends ActionType { @@ -16,6 +15,6 @@ public class EsqlStatsAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/esql/stats/dist"; private EsqlStatsAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java index 0e481c3dd762b..bc5f7349db8c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java @@ -190,7 +190,7 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_065; // This is 8.11 - the first version of ESQL + return TransportVersions.V_8_11_X; // the first version of ESQL } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 284c78c6e0121..642a50ae34fef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -9,9 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; -import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -22,6 +21,7 @@ import org.elasticsearch.xpack.esql.analysis.PreAnalyzer; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; +import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer; @@ -52,7 +52,7 @@ import org.elasticsearch.xpack.ql.type.InvalidMappedField; import org.elasticsearch.xpack.ql.util.Holder; -import java.util.HashSet; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; @@ -150,32 +150,40 @@ public void analyzedPlan(LogicalPlan parsed, ActionListener listene private void preAnalyze(LogicalPlan parsed, BiFunction action, ActionListener listener) { PreAnalyzer.PreAnalysis preAnalysis = preAnalyzer.preAnalyze(parsed); - Set policyNames = new HashSet<>(preAnalysis.policyNames); - EnrichResolution resolution = new EnrichResolution(ConcurrentCollections.newConcurrentSet(), enrichPolicyResolver.allPolicyNames()); - - ActionListener groupedListener = listener.delegateFailureAndWrap((l, unused) -> { - assert resolution.resolvedPolicies().size() == policyNames.size() - : resolution.resolvedPolicies().size() + " != " + policyNames.size(); - + var unresolvedPolicies = preAnalysis.enriches.stream() + .map(e -> new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode())) + .collect(Collectors.toSet()); + final Set targetClusters = enrichPolicyResolver.groupIndicesPerCluster( + preAnalysis.indices.stream() + .flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))) + .toArray(String[]::new) + ).keySet(); + enrichPolicyResolver.resolvePolicies(targetClusters, 
unresolvedPolicies, listener.delegateFailureAndWrap((l, enrichResolution) -> { // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API - var matchFields = resolution.resolvedPolicies() + var matchFields = enrichResolution.resolvedEnrichPolicies() .stream() - .filter(p -> p.index().isValid()) // only if the policy by the specified name was found; later the Verifier will be - // triggered - .map(p -> p.policy().getMatchField()) + .map(ResolvedEnrichPolicy::matchField) .collect(Collectors.toSet()); - - preAnalyzeIndices( - parsed, - l.delegateFailureAndWrap((ll, indexResolution) -> ll.onResponse(action.apply(indexResolution, resolution))), - matchFields - ); - }); - try (RefCountingListener refs = new RefCountingListener(groupedListener)) { - for (String policyName : policyNames) { - enrichPolicyResolver.resolvePolicy(policyName, refs.acquire(resolution.resolvedPolicies()::add)); - } - } + preAnalyzeIndices(parsed, l.delegateFailureAndWrap((ll, indexResolution) -> { + if (indexResolution.isValid()) { + Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( + indexResolution.get().concreteIndices().toArray(String[]::new) + ).keySet(); + // If new clusters appear when resolving the main indices, we need to resolve the enrich policies again + // or exclude main concrete indices. Since this is rare, it's simpler to resolve the enrich policies again. 
+ // TODO: add a test for this + if (targetClusters.containsAll(newClusters) == false) { + enrichPolicyResolver.resolvePolicies( + newClusters, + unresolvedPolicies, + ll.map(newEnrichResolution -> action.apply(indexResolution, newEnrichResolution)) + ); + return; + } + } + ll.onResponse(action.apply(indexResolution, enrichResolution)); + }), matchFields); + })); } private void preAnalyzeIndices(LogicalPlan parsed, ActionListener listener, Set enrichPolicyMatchFields) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index eae808abb5037..0813069330879 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -47,6 +47,8 @@ public final class EsqlDataTypes { public static final DataType TIME_DURATION = new DataType("TIME_DURATION", null, Integer.BYTES + Long.BYTES, false, false, false); public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, false); public static final DataType CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, false); + public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, false); + public static final DataType CARTESIAN_SHAPE = new DataType("cartesian_shape", Integer.MAX_VALUE, false, false, false); private static final Collection TYPES = Stream.of( BOOLEAN, @@ -72,7 +74,9 @@ public final class EsqlDataTypes { VERSION, UNSIGNED_LONG, GEO_POINT, - CARTESIAN_POINT + CARTESIAN_POINT, + CARTESIAN_SHAPE, + GEO_SHAPE ).sorted(Comparator.comparing(DataType::typeName)).toList(); private static final Map NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t)); @@ -83,6 +87,7 @@ public final class EsqlDataTypes { Map map = 
TYPES.stream().filter(e -> e.esType() != null).collect(toMap(DataType::esType, t -> t)); // ES calls this 'point', but ESQL calls it 'cartesian_point' map.put("point", CARTESIAN_POINT); + map.put("shape", CARTESIAN_SHAPE); ES_TO_TYPE = Collections.unmodifiableMap(map); } @@ -167,6 +172,10 @@ public static boolean isNullOrTimeDuration(DataType t) { } public static boolean isSpatial(DataType t) { + return t == GEO_POINT || t == CARTESIAN_POINT || t == GEO_SHAPE || t == CARTESIAN_SHAPE; + } + + public static boolean isSpatialPoint(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 39a7eee2e616d..a77045262c114 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; @@ -39,6 +40,7 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -48,7 +50,7 @@ import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; -import 
org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; @@ -60,6 +62,7 @@ import org.elasticsearch.xpack.esql.optimizer.TestLocalPhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.TestPhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; @@ -89,7 +92,6 @@ import java.net.URL; import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -263,18 +265,27 @@ private static IndexResolution loadIndexResolution(String mappingName, String in } private static EnrichResolution loadEnrichPolicies() { - Set names = new HashSet<>(); - Set resolutions = new HashSet<>(); + EnrichResolution enrichResolution = new EnrichResolution(); for (CsvTestsDataLoader.EnrichConfig policyConfig : CsvTestsDataLoader.ENRICH_POLICIES) { EnrichPolicy policy = loadEnrichPolicyMapping(policyConfig.policyFileName()); CsvTestsDataLoader.TestsDataset sourceIndex = CSV_DATASET_MAP.get(policy.getIndices().get(0)); // this could practically work, but it's wrong: // EnrichPolicyResolution should contain the policy (system) index, not the source index - IndexResolution idxRes = loadIndexResolution(sourceIndex.mappingFileName(), sourceIndex.indexName()); - names.add(policyConfig.policyName()); - resolutions.add(new EnrichPolicyResolution(policyConfig.policyName(), policy, idxRes)); + EsIndex esIndex = 
loadIndexResolution(sourceIndex.mappingFileName(), sourceIndex.indexName()).get(); + var concreteIndices = Map.of(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); + enrichResolution.addResolvedPolicy( + policyConfig.policyName(), + Enrich.Mode.ANY, + new ResolvedEnrichPolicy( + policy.getMatchField(), + policy.getType(), + policy.getEnrichFields(), + concreteIndices, + esIndex.mapping() + ) + ); } - return new EnrichResolution(resolutions, names); + return enrichResolution; } private static EnrichPolicy loadEnrichPolicyMapping(String policyFileName) { @@ -338,6 +349,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { ); LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( sessionId, + "", new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()), bigArrays, blockFactory, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index fa5334fb33ef7..3b64870a15839 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ParserConstructor; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -56,10 +57,13 @@ import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.stream.Stream; +import static org.elasticsearch.common.xcontent.ChunkedToXContent.wrapAsToXContent; import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.esql.action.EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION; import static org.elasticsearch.xpack.esql.action.ResponseValueUtils.valuesToPage; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; @@ -150,9 +154,13 @@ private Page randomPage(List columns) { new BytesRef(UnsupportedValueSource.UNSUPPORTED_OUTPUT) ); case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef()); - case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.pointAsWKB(GeometryTestUtils.randomPoint())); - case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef( - CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()) + case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.asWkb(GeometryTestUtils.randomPoint())); + case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(CARTESIAN.asWkb(ShapeTestUtils.randomPoint())); + case "geo_shape" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + GEO.asWkb(GeometryTestUtils.randomGeometry(randomBoolean())) + ); + case "cartesian_shape" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean())) ); case "null" -> builder.appendNull(); case "_source" -> { @@ -325,28 +333,38 @@ public void testChunkResponseSizeRows() { public void testSimpleXContentColumnar() { try (EsqlQueryResponse response = simple(true)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""")); } } + public void testSimpleXContentColumnarDropNulls() { + try (EsqlQueryResponse 
response = simple(true)) { + assertThat( + Strings.toString(wrapAsToXContent(response), new ToXContent.MapParams(Map.of(DROP_NULL_COLUMNS_OPTION, "true"))), + equalTo(""" + {"all_columns":[{"name":"foo","type":"integer"}],"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""") + ); + } + } + public void testSimpleXContentColumnarAsync() { try (EsqlQueryResponse response = simple(true, true)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"is_running":false,"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""")); } } public void testSimpleXContentRows() { try (EsqlQueryResponse response = simple(false)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); } } public void testSimpleXContentRowsAsync() { try (EsqlQueryResponse response = simple(false, true)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"is_running":false,"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); } } @@ -368,6 +386,58 @@ public void testBasicXContentIdAndRunning() { } } + public void testNullColumnsXContentDropNulls() { + try ( + EsqlQueryResponse response = new EsqlQueryResponse( + List.of(new ColumnInfo("foo", "integer"), new ColumnInfo("all_null", "integer")), + List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock(), blockFactory.newConstantNullBlock(2))), + null, + false, + null, + false, + false + ) + ) { + assertThat( + Strings.toString(wrapAsToXContent(response), new ToXContent.MapParams(Map.of(DROP_NULL_COLUMNS_OPTION, "true"))), + equalTo("{" + """ + "all_columns":[{"name":"foo","type":"integer"},{"name":"all_null","type":"integer"}],""" + """ + "columns":[{"name":"foo","type":"integer"}],""" + """ + 
"values":[[40],[80]]}""") + ); + } + } + + /** + * This is a paranoid test to make sure the {@link Block}s produced by {@link Block.Builder} + * that contain only {@code null} entries are properly recognized by the {@link EsqlQueryResponse#DROP_NULL_COLUMNS_OPTION}. + */ + public void testNullColumnsFromBuilderXContentDropNulls() { + try (IntBlock.Builder b = blockFactory.newIntBlockBuilder(2)) { + b.appendNull(); + b.appendNull(); + try ( + EsqlQueryResponse response = new EsqlQueryResponse( + List.of(new ColumnInfo("foo", "integer"), new ColumnInfo("all_null", "integer")), + List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock(), b.build())), + null, + false, + null, + false, + false + ) + ) { + assertThat( + Strings.toString(wrapAsToXContent(response), new ToXContent.MapParams(Map.of(DROP_NULL_COLUMNS_OPTION, "true"))), + equalTo("{" + """ + "all_columns":[{"name":"foo","type":"integer"},{"name":"all_null","type":"integer"}],""" + """ + "columns":[{"name":"foo","type":"integer"}],""" + """ + "values":[[40],[80]]}""") + ); + } + } + } + private EsqlQueryResponse simple(boolean columnar) { return simple(columnar, false); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index e357efe3fcc1f..38588090d1656 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -9,9 +9,10 @@ import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import 
org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; @@ -19,7 +20,7 @@ import java.util.ArrayList; import java.util.List; -import java.util.Set; +import java.util.Map; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; @@ -88,28 +89,19 @@ public static IndexResolution analyzerExpandedDefaultMapping() { } public static EnrichResolution defaultEnrichResolution() { - EnrichPolicyResolution policyRes = loadEnrichPolicyResolution( - "languages", - "language_code", - "languages_idx", - "mapping-languages.json" - ); - return new EnrichResolution(Set.of(policyRes), Set.of("languages")); + return loadEnrichPolicyResolution("languages", "language_code", "languages_idx", "mapping-languages.json"); } - public static EnrichPolicyResolution loadEnrichPolicyResolution( - String policyName, - String matchField, - String idxName, - String mappingFile - ) { + public static EnrichResolution loadEnrichPolicyResolution(String policyName, String matchField, String idxName, String mappingFile) { IndexResolution mapping = loadMapping(mappingFile, idxName); List enrichFields = new ArrayList<>(mapping.get().mapping().keySet()); enrichFields.remove(matchField); - return new EnrichPolicyResolution( + EnrichResolution enrichResolution = new EnrichResolution(); + enrichResolution.addResolvedPolicy( policyName, - new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(idxName), matchField, enrichFields), - mapping + Enrich.Mode.ANY, + new ResolvedEnrichPolicy(matchField, EnrichPolicy.MATCH_TYPE, enrichFields, Map.of("", idxName), mapping.get().mapping()) ); + return enrichResolution; } } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 90e45a0a8b5a7..a1d5374773eb4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -16,11 +16,11 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.parser.ParsingException; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Row; @@ -52,9 +52,9 @@ import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.stream.IntStream; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -1255,78 +1255,78 @@ public void testEmptyEsRelationOnCountStar() throws IOException { } public void testUnsupportedFieldsInStats() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | stats max(shape) + | stats max(unsupported) """, errorMsg); verifyUnsupported(""" from test - | stats max(int) by shape + | stats max(int) by 
unsupported """, errorMsg); verifyUnsupported(""" from test - | stats max(int) by bool, shape + | stats max(int) by bool, unsupported """, errorMsg); } public void testUnsupportedFieldsInEval() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | eval x = shape + | eval x = unsupported """, errorMsg); verifyUnsupported(""" from test - | eval foo = 1, x = shape + | eval foo = 1, x = unsupported """, errorMsg); verifyUnsupported(""" from test - | eval x = 1 + shape + | eval x = 1 + unsupported """, errorMsg); } public void testUnsupportedFieldsInWhere() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | where shape == "[1.0, 1.0]" + | where unsupported == "[1.0, 1.0]" """, errorMsg); verifyUnsupported(""" from test - | where int > 2 and shape == "[1.0, 1.0]" + | where int > 2 and unsupported == "[1.0, 1.0]" """, errorMsg); } public void testUnsupportedFieldsInSort() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | sort shape + | sort unsupported """, errorMsg); verifyUnsupported(""" from test - | sort int, shape + | sort int, unsupported """, errorMsg); } public void testUnsupportedFieldsInDissect() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | dissect shape \"%{foo}\" + | dissect unsupported \"%{foo}\" """, errorMsg); } public void testUnsupportedFieldsInGrok() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use 
field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | grok shape \"%{WORD:foo}\" + | grok unsupported \"%{WORD:foo}\" """, errorMsg); } @@ -1350,7 +1350,8 @@ public void testRegexOnInt() { public void testUnsupportedTypesWithToString() { // DATE_PERIOD and TIME_DURATION types have been added, but not really patched through the engine; i.e. supported. - final String supportedTypes = "boolean or cartesian_point or datetime or geo_point or ip or numeric or string or version"; + final String supportedTypes = + "boolean or cartesian_point or cartesian_shape or datetime or geo_point or geo_shape or ip or numeric or string or version"; verifyUnsupported( "row period = 1 year | eval to_string(period)", "line 1:28: argument of [to_string(period)] must be [" + supportedTypes + "], found value [period] type [date_period]" @@ -1359,31 +1360,58 @@ public void testUnsupportedTypesWithToString() { "row duration = 1 hour | eval to_string(duration)", "line 1:30: argument of [to_string(duration)] must be [" + supportedTypes + "], found value [duration] type [time_duration]" ); - verifyUnsupported("from test | eval to_string(shape)", "line 1:28: Cannot use field [shape] with unsupported type [geo_shape]"); + verifyUnsupported( + "from test | eval to_string(unsupported)", + "line 1:28: Cannot use field [unsupported] with unsupported type [ip_range]" + ); } - public void testNonExistingEnrichPolicy() { - var e = expectThrows(VerificationException.class, () -> analyze(""" - from test - | enrich foo on bar - """)); - assertThat(e.getMessage(), containsString("unresolved enrich policy [foo]")); - } + public void testEnrichPolicyWithError() { + IndexResolution testIndex = loadMapping("mapping-basic.json", "test"); + IndexResolution languageIndex = loadMapping("mapping-languages.json", "languages"); + EnrichResolution enrichResolution = new EnrichResolution(); + Map enrichIndices = Map.of("", "languages"); + enrichResolution.addResolvedPolicy( + 
"languages", + Enrich.Mode.COORDINATOR, + new ResolvedEnrichPolicy( + "language_code", + "match", + List.of("language_code", "language_name"), + enrichIndices, + languageIndex.get().mapping() + ) + ); + enrichResolution.addError("languages", Enrich.Mode.REMOTE, "error-1"); + enrichResolution.addError("languages", Enrich.Mode.ANY, "error-2"); + enrichResolution.addError("foo", Enrich.Mode.ANY, "foo-error-101"); - public void testNonExistingEnrichNoMatchField() { - var e = expectThrows(VerificationException.class, () -> analyze(""" - from test - | enrich foo - """)); - assertThat(e.getMessage(), containsString("unresolved enrich policy [foo]")); - } + AnalyzerContext context = new AnalyzerContext(configuration("from test"), new EsqlFunctionRegistry(), testIndex, enrichResolution); + Analyzer analyzer = new Analyzer(context, TEST_VERIFIER); + { + LogicalPlan plan = analyze("from test | EVAL x = to_string(languages) | ENRICH[ccq.mode:coordinator] languages ON x", analyzer); + List resolved = new ArrayList<>(); + plan.forEachDown(Enrich.class, resolved::add); + assertThat(resolved, hasSize(1)); + } + var e = expectThrows( + VerificationException.class, + () -> analyze("from test | EVAL x = to_string(languages) | ENRICH[ccq.mode:any] languages ON x", analyzer) + ); + assertThat(e.getMessage(), containsString("error-2")); + e = expectThrows( + VerificationException.class, + () -> analyze("from test | EVAL x = to_string(languages) | ENRICH languages ON xs", analyzer) + ); + assertThat(e.getMessage(), containsString("error-2")); + e = expectThrows( + VerificationException.class, + () -> analyze("from test | EVAL x = to_string(languages) | ENRICH[ccq.mode:remote] languages ON x", analyzer) + ); + assertThat(e.getMessage(), containsString("error-1")); - public void testNonExistingEnrichPolicyWithSimilarName() { - var e = expectThrows(VerificationException.class, () -> analyze(""" - from test - | enrich language on bar - """)); - assertThat(e.getMessage(), 
containsString("unresolved enrich policy [language], did you mean [languages]")); + e = expectThrows(VerificationException.class, () -> analyze("from test | ENRICH foo", analyzer)); + assertThat(e.getMessage(), containsString("foo-error-101")); } public void testEnrichPolicyMatchFieldName() { @@ -1454,10 +1482,18 @@ public void testEnrichFieldsIncludeMatchField() { """; IndexResolution testIndex = loadMapping("mapping-basic.json", "test"); IndexResolution languageIndex = loadMapping("mapping-languages.json", "languages"); - var enrichPolicy = new EnrichPolicy("match", null, List.of("unused"), "language_code", List.of("language_code", "language_name")); - EnrichResolution enrichResolution = new EnrichResolution( - Set.of(new EnrichPolicyResolution("languages", enrichPolicy, languageIndex)), - Set.of("languages") + EnrichResolution enrichResolution = new EnrichResolution(); + Map enrichIndices = Map.of("", "languages"); + enrichResolution.addResolvedPolicy( + "languages", + Enrich.Mode.ANY, + new ResolvedEnrichPolicy( + "language_code", + "match", + List.of("language_code", "language_name"), + enrichIndices, + languageIndex.get().mapping() + ) ); AnalyzerContext context = new AnalyzerContext(configuration(query), new EsqlFunctionRegistry(), testIndex, enrichResolution); Analyzer analyzer = new Analyzer(context, TEST_VERIFIER); @@ -1484,6 +1520,35 @@ public void testMissingAttributeException_InChainedEval() { assertThat(e.getMessage(), containsString("Unknown column [x5], did you mean any of [x1, x2, x3]?")); } + public void testInsensitiveEqualsWrongType() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + | where first_name =~ 12 + """)); + assertThat( + e.getMessage(), + containsString("second argument of [first_name =~ 12] must be [string], found value [12] type [integer]") + ); + + e = expectThrows(VerificationException.class, () -> analyze(""" + from test + | where first_name =~ languages + """)); + assertThat( + 
e.getMessage(), + containsString("second argument of [first_name =~ languages] must be [string], found value [languages] type [integer]") + ); + + e = expectThrows(VerificationException.class, () -> analyze(""" + from test + | where languages =~ "foo" + """)); + assertThat( + e.getMessage(), + containsString("first argument of [languages =~ \"foo\"] must be [string], found value [languages] type [integer]") + ); + } + public void testUnresolvedMvExpand() { var e = expectThrows(VerificationException.class, () -> analyze("row foo = 1 | mv_expand bar")); assertThat(e.getMessage(), containsString("Unknown column [bar]")); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 8c583e0164d47..dc2cf4bdb50af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -70,17 +70,13 @@ public void testAggsExpressionsInStatsAggs() { error("from test | stats length(first_name), count(1) by first_name") ); assertEquals( - "1:19: aggregate function's field must be an attribute or literal; found [emp_no / 2] of type [Div]", - error("from test | stats x = avg(emp_no / 2) by emp_no") + "1:23: nested aggregations [max(salary)] not allowed inside other aggregations [max(max(salary))]", + error("from test | stats max(max(salary)) by first_name") ); assertEquals( "1:25: argument of [avg(first_name)] must be [numeric], found value [first_name] type [keyword]", error("from test | stats count(avg(first_name)) by first_name") ); - assertEquals( - "1:19: aggregate function's field must be an attribute or literal; found [length(first_name)] of type [Length]", - error("from test | stats count(length(first_name)) by first_name") - ); assertEquals( "1:23: expected an aggregate function or group but got [emp_no + 
avg(emp_no)] of type [Add]", error("from test | stats x = emp_no + avg(emp_no) by emp_no") @@ -95,6 +91,17 @@ public void testAggsExpressionsInStatsAggs() { ); } + public void testAggsInsideGrouping() { + assertEquals( + "1:36: cannot use an aggregate [max(languages)] for grouping", + error("from test| stats max(languages) by max(languages)") + ); + } + + public void testAggsInsideEval() throws Exception { + assertEquals("1:29: aggregate function [max(b)] not allowed outside STATS command", error("row a = 1, b = 2 | eval x = max(b)")); + } + public void testDoubleRenamingField() { assertEquals( "1:44: Column [emp_no] renamed to [r1] and is no longer available [emp_no as r3]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java new file mode 100644 index 0000000000000..136a634095486 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java @@ -0,0 +1,512 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.FilterClient; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.node.VersionInformation; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.FixedExecutorBuilder; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.AbstractSimpleTransportTestCase; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.enrich.EnrichMetadata; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.esql.analysis.EnrichResolution; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; +import org.elasticsearch.xpack.ql.index.IndexResolver; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; 
+import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.transport.RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class EnrichPolicyResolverTests extends ESTestCase { + + private final Map transports = new HashMap<>(); + private TestThreadPool threadPool; + private TestEnrichPolicyResolver localCluster; + private TestEnrichPolicyResolver clusterA; + private TestEnrichPolicyResolver clusterB; + + @After + public void stopClusters() { + transports.values().forEach(TransportService::stop); + terminate(threadPool); + } + + @Before + public void setUpClusters() { + threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, "esql", between(1, 8), 1024, "esql", EsExecutors.TaskTrackingConfig.DEFAULT) + ); + for (String cluster : List.of("", "cluster_a", "cluster_b")) { + var transport = MockTransportService.createNewService( + Settings.EMPTY, + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool + ); + transport.acceptIncomingRequests(); + transport.start(); + transports.put(cluster, transport); + } + AbstractSimpleTransportTestCase.connectToNode(transports.get(""), transports.get("cluster_a").getLocalNode()); + AbstractSimpleTransportTestCase.connectToNode(transports.get(""), transports.get("cluster_b").getLocalNode()); + localCluster = newEnrichPolicyResolver(LOCAL_CLUSTER_GROUP_KEY); + clusterA = newEnrichPolicyResolver("cluster_a"); + clusterB = newEnrichPolicyResolver("cluster_b"); + + // hosts policies are the same across clusters + var hostsPolicy = new EnrichPolicy("match", null, List.of(), "ip", List.of("region", "cost")); + var hostsMapping = Map.of("ip", "ip", 
"region", "keyword", "cost", "long"); + localCluster.aliases.put(".enrich-hosts", ".enrich-hosts-123"); + localCluster.mappings.put(".enrich-hosts-123", hostsMapping); + localCluster.policies.put("hosts", hostsPolicy); + + clusterA.aliases.put(".enrich-hosts", ".enrich-hosts-999"); + clusterA.mappings.put(".enrich-hosts-999", hostsMapping); + clusterA.policies.put("hosts", hostsPolicy); + + clusterB.aliases.put(".enrich-hosts", ".enrich-hosts-100"); + clusterB.mappings.put(".enrich-hosts-100", hostsMapping); + clusterB.policies.put("hosts", hostsPolicy); + + // addresses policies are compatible across clusters + var addressPolicy = new EnrichPolicy("match", null, List.of(), "emp_id", List.of("country", "city")); + var addressPolicyA = new EnrichPolicy("match", null, List.of(), "emp_id", List.of("country", "city", "state")); + var addressPolicyB = new EnrichPolicy("match", null, List.of(), "emp_id", List.of("country", "city")); + + var addressMapping = Map.of("emp_id", "long", "country", "keyword", "city", "keyword"); + var addressMappingA = Map.of("emp_id", "long", "country", "keyword", "city", "keyword", "state", "keyword"); + var addressMappingB = Map.of("emp_id", "long", "country", "keyword", "city", "keyword"); + + localCluster.aliases.put(".enrich-address", ".enrich-address-1001"); + localCluster.mappings.put(".enrich-address-1001", addressMapping); + localCluster.policies.put("address", addressPolicy); + + clusterA.aliases.put(".enrich-address", ".enrich-address-1002"); + clusterA.mappings.put(".enrich-address-1002", addressMappingA); + clusterA.policies.put("address", addressPolicyA); + + clusterB.aliases.put(".enrich-address", ".enrich-address-1003"); + clusterB.mappings.put(".enrich-address-1003", addressMappingB); + clusterB.policies.put("address", addressPolicyB); + + // authors are not compatible + var authorPolicy = new EnrichPolicy("match", null, List.of(), "author", List.of("name", "address")); + var authorPolicyA = new EnrichPolicy("range", null, 
List.of(), "author", List.of("name", "address")); + var authorPolicyB = new EnrichPolicy("match", null, List.of(), "author", List.of("name", "address")); + + var authorMapping = Map.of("author", "keyword", "name", "text", "address", "text"); + var authorMappingA = Map.of("author", "long", "name", "text", "address", "text"); + var authorMappingB = Map.of("author", "long", "name", "text", "address", "text"); + + localCluster.aliases.put(".enrich-author", ".enrich-author-X"); + localCluster.mappings.put(".enrich-author-X", authorMapping); + localCluster.policies.put("author", authorPolicy); + + clusterA.aliases.put(".enrich-author", ".enrich-author-A"); + clusterA.mappings.put(".enrich-author-A", authorMappingA); + clusterA.policies.put("author", authorPolicyA); + + clusterB.aliases.put(".enrich-author", ".enrich-author-B"); + clusterB.mappings.put(".enrich-author-B", authorMappingB); + clusterB.policies.put("author", authorPolicyB); + } + + private void assertHostPolicies(ResolvedEnrichPolicy resolved) { + assertNotNull(resolved); + assertThat(resolved.matchField(), equalTo("ip")); + assertThat(resolved.enrichFields(), equalTo(List.of("region", "cost"))); + assertThat(resolved.mapping().keySet(), containsInAnyOrder("ip", "region", "cost")); + } + + public void testLocalHosts() { + for (Enrich.Mode mode : Enrich.Mode.values()) { + Set clusters = Set.of(LOCAL_CLUSTER_GROUP_KEY); + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("hosts", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("hosts", mode); + assertHostPolicies(resolved); + assertThat(resolved.concreteIndices(), equalTo(Map.of("", ".enrich-hosts-123"))); + } + } + + public void testRemoteHosts() { + Set clusters = Set.of("cluster_a", "cluster_b"); + for (Enrich.Mode mode : Enrich.Mode.values()) { + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("hosts", mode))); + 
ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("hosts", mode); + assertHostPolicies(resolved); + var expectedIndices = switch (mode) { + case COORDINATOR -> Map.of("", ".enrich-hosts-123"); + case ANY -> Map.of("", ".enrich-hosts-123", "cluster_a", ".enrich-hosts-999", "cluster_b", ".enrich-hosts-100"); + case REMOTE -> Map.of("cluster_a", ".enrich-hosts-999", "cluster_b", ".enrich-hosts-100"); + }; + assertThat(resolved.concreteIndices(), equalTo(expectedIndices)); + } + } + + public void testMixedHosts() { + Set clusters = Set.of(LOCAL_CLUSTER_GROUP_KEY, "cluster_a", "cluster_b"); + for (Enrich.Mode mode : Enrich.Mode.values()) { + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("hosts", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("hosts", mode); + assertHostPolicies(resolved); + var expectedIndices = switch (mode) { + case COORDINATOR -> Map.of("", ".enrich-hosts-123"); + case ANY, REMOTE -> Map.of("", ".enrich-hosts-123", "cluster_a", ".enrich-hosts-999", "cluster_b", ".enrich-hosts-100"); + }; + assertThat(mode.toString(), resolved.concreteIndices(), equalTo(expectedIndices)); + } + } + + public void testLocalAddress() { + for (Enrich.Mode mode : Enrich.Mode.values()) { + Set clusters = Set.of(LOCAL_CLUSTER_GROUP_KEY); + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("address", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("address", mode); + assertNotNull(resolved); + assertThat(resolved.matchField(), equalTo("emp_id")); + assertThat(resolved.enrichFields(), equalTo(List.of("country", "city"))); + assertThat(resolved.mapping().keySet(), containsInAnyOrder("emp_id", "country", "city")); + assertThat(resolved.concreteIndices(), equalTo(Map.of("", ".enrich-address-1001"))); + } + { + List clusters = randomSubsetOf(between(1, 3), List.of("", "cluster_a", "cluster_a")); + var mode = 
Enrich.Mode.COORDINATOR; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("address", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("address", mode); + assertNotNull(resolved); + assertThat(resolved.matchField(), equalTo("emp_id")); + assertThat(resolved.enrichFields(), equalTo(List.of("country", "city"))); + assertThat(resolved.mapping().keySet(), containsInAnyOrder("emp_id", "country", "city")); + assertThat(resolved.concreteIndices(), equalTo(Map.of("", ".enrich-address-1001"))); + } + } + + public void testRemoteAddress() { + Set clusters = Set.of("cluster_a", "cluster_b"); + for (Enrich.Mode mode : List.of(Enrich.Mode.ANY, Enrich.Mode.REMOTE)) { + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("address", mode))); + assertNull(resolution.getResolvedPolicy("address", mode)); + var msg = "enrich policy [address] has different enrich fields across clusters; " + + "these fields are missing in some policies: [state]"; + assertThat(resolution.getError("address", mode), equalTo(msg)); + } + } + + public void testMixedAddress() { + Set clusters = Set.of(LOCAL_CLUSTER_GROUP_KEY, "cluster_a", "cluster_b"); + for (Enrich.Mode mode : List.of(Enrich.Mode.ANY, Enrich.Mode.REMOTE)) { + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("hosts", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("hosts", mode); + assertHostPolicies(resolved); + assertThat( + mode.toString(), + resolved.concreteIndices(), + equalTo(Map.of("", ".enrich-hosts-123", "cluster_a", ".enrich-hosts-999", "cluster_b", ".enrich-hosts-100")) + ); + } + } + + public void testLocalAuthor() { + for (Enrich.Mode mode : Enrich.Mode.values()) { + Set clusters = Set.of(LOCAL_CLUSTER_GROUP_KEY); + var resolution = localCluster.resolvePolicies(clusters, List.of(new 
EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("author", mode); + assertNotNull(resolved); + assertThat(resolved.matchField(), equalTo("author")); + assertThat(resolved.enrichFields(), equalTo(List.of("name", "address"))); + assertThat(resolved.mapping().keySet(), containsInAnyOrder("author", "name", "address")); + assertThat(resolved.concreteIndices(), equalTo(Map.of("", ".enrich-author-X"))); + } + { + var mode = Enrich.Mode.COORDINATOR; + var clusters = randomSubsetOf(between(1, 3), Set.of("", "cluster_a", "cluster_b")); + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("author", mode); + assertNotNull(resolved); + assertThat(resolved.matchField(), equalTo("author")); + assertThat(resolved.matchType(), equalTo("match")); + assertThat(resolved.enrichFields(), equalTo(List.of("name", "address"))); + assertThat(resolved.mapping().keySet(), containsInAnyOrder("author", "name", "address")); + assertThat(resolved.concreteIndices(), equalTo(Map.of("", ".enrich-author-X"))); + } + } + + public void testAuthorClusterA() { + Set clusters = Set.of("cluster_a"); + { + var mode = Enrich.Mode.ANY; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + assertNull(resolution.getResolvedPolicy("author", mode)); + assertThat( + resolution.getError("author", mode), + equalTo("enrich policy [author] has different match types [match, range] across clusters") + ); + } + { + var mode = Enrich.Mode.REMOTE; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("author", mode); + assertNotNull(resolved); + assertThat(resolved.matchType(), equalTo("range")); + 
assertThat(resolved.matchField(), equalTo("author")); + assertThat(resolved.enrichFields(), equalTo(List.of("name", "address"))); + assertThat(resolved.mapping().keySet(), containsInAnyOrder("author", "name", "address")); + assertThat(resolved.concreteIndices(), equalTo(Map.of("cluster_a", ".enrich-author-A"))); + } + } + + public void testAuthorClusterB() { + Set clusters = Set.of("cluster_b"); + { + var mode = Enrich.Mode.ANY; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + assertNull(resolution.getResolvedPolicy("author", mode)); + assertThat( + resolution.getError("author", mode), + equalTo("field [author] of enrich policy [author] has different data types [KEYWORD, LONG] across clusters") + ); + } + { + var mode = Enrich.Mode.REMOTE; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + ResolvedEnrichPolicy resolved = resolution.getResolvedPolicy("author", mode); + assertNotNull(resolved); + assertThat(resolved.matchType(), equalTo("match")); + assertThat(resolved.matchField(), equalTo("author")); + assertThat(resolved.enrichFields(), equalTo(List.of("name", "address"))); + assertThat(resolved.mapping().keySet(), containsInAnyOrder("author", "name", "address")); + assertThat(resolved.concreteIndices(), equalTo(Map.of("cluster_b", ".enrich-author-B"))); + } + } + + public void testAuthorClusterAAndClusterB() { + Set clusters = Set.of("cluster_a", "cluster_b"); + { + var mode = Enrich.Mode.ANY; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + assertNull(resolution.getResolvedPolicy("author", mode)); + assertThat( + resolution.getError("author", mode), + equalTo("enrich policy [author] has different match types [match, range] across clusters") + ); + } + { + var mode = Enrich.Mode.REMOTE; + var resolution = 
localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + assertNull(resolution.getResolvedPolicy("author", mode)); + assertThat( + resolution.getError("author", mode), + equalTo("enrich policy [author] has different match types [range, match] across clusters") + ); + } + } + + public void testLocalAndClusterBAuthor() { + Set clusters = Set.of("", "cluster_b"); + { + var mode = Enrich.Mode.ANY; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + assertNull(resolution.getResolvedPolicy("author", mode)); + assertThat( + resolution.getError("author", mode), + equalTo("field [author] of enrich policy [author] has different data types [KEYWORD, LONG] across clusters") + ); + } + { + var mode = Enrich.Mode.REMOTE; + var resolution = localCluster.resolvePolicies(clusters, List.of(new EnrichPolicyResolver.UnresolvedPolicy("author", mode))); + assertNull(resolution.getResolvedPolicy("author", mode)); + assertThat( + resolution.getError("author", mode), + equalTo("field [author] of enrich policy [author] has different data types [KEYWORD, LONG] across clusters") + ); + } + } + + public void testMissingLocalPolicy() { + for (Enrich.Mode mode : Enrich.Mode.values()) { + var resolution = localCluster.resolvePolicies(Set.of(""), List.of(new EnrichPolicyResolver.UnresolvedPolicy("authoz", mode))); + assertNull(resolution.getResolvedPolicy("authoz", mode)); + assertThat(resolution.getError("authoz", mode), equalTo("enrich policy [authoz] doesn't exist, did you mean [author]?")); + } + } + + public void testMissingRemotePolicy() { + { + var mode = Enrich.Mode.REMOTE; + var resolution = localCluster.resolvePolicies( + Set.of("cluster_a"), + List.of(new EnrichPolicyResolver.UnresolvedPolicy("addrezz", mode)) + ); + assertNull(resolution.getResolvedPolicy("addrezz", mode)); + assertThat(resolution.getError("addrezz", mode), equalTo("enrich policy [addrezz] 
doesn't exist on clusters [cluster_a]")); + } + { + var mode = Enrich.Mode.ANY; + var resolution = localCluster.resolvePolicies( + Set.of("cluster_a"), + List.of(new EnrichPolicyResolver.UnresolvedPolicy("addrezz", mode)) + ); + assertNull(resolution.getResolvedPolicy("addrezz", mode)); + assertThat( + resolution.getError("addrezz", mode), + equalTo("enrich policy [addrezz] doesn't exist on clusters [_local, cluster_a]") + ); + } + } + + TestEnrichPolicyResolver newEnrichPolicyResolver(String cluster) { + return new TestEnrichPolicyResolver(cluster, new HashMap<>(), new HashMap<>(), new HashMap<>()); + } + + class TestEnrichPolicyResolver extends EnrichPolicyResolver { + final String cluster; + final Map policies; + final Map aliases; + final Map> mappings; + + TestEnrichPolicyResolver( + String cluster, + Map policies, + Map aliases, + Map> mappings + ) { + super( + mockClusterService(policies), + transports.get(cluster), + new IndexResolver(new FieldCapsClient(threadPool, aliases, mappings), cluster, EsqlDataTypeRegistry.INSTANCE, Set::of) + ); + this.policies = policies; + this.cluster = cluster; + this.aliases = aliases; + this.mappings = mappings; + } + + EnrichResolution resolvePolicies(Collection clusters, Collection unresolvedPolicies) { + PlainActionFuture future = new PlainActionFuture<>(); + if (randomBoolean()) { + unresolvedPolicies = new ArrayList<>(unresolvedPolicies); + for (Enrich.Mode mode : Enrich.Mode.values()) { + for (String policy : List.of("hosts", "address", "author")) { + if (randomBoolean()) { + unresolvedPolicies.add(new UnresolvedPolicy(policy, mode)); + } + } + } + if (randomBoolean()) { + unresolvedPolicies.add(new UnresolvedPolicy("legacy-policy-1", randomFrom(Enrich.Mode.values()))); + } + } + super.resolvePolicies(clusters, unresolvedPolicies, future); + return future.actionGet(30, TimeUnit.SECONDS); + } + + @Override + protected Transport.Connection getRemoteConnection(String remoteCluster) { + assertThat("Must only called on the 
local cluster", cluster, equalTo(LOCAL_CLUSTER_GROUP_KEY)); + return transports.get("").getConnection(transports.get(remoteCluster).getLocalDiscoNode()); + } + + static ClusterService mockClusterService(Map policies) { + ClusterService clusterService = mock(ClusterService.class); + EnrichMetadata enrichMetadata = new EnrichMetadata(policies); + ClusterState state = ClusterState.builder(new ClusterName("test")) + .metadata(Metadata.builder().customs(Map.of(EnrichMetadata.TYPE, enrichMetadata))) + .build(); + when(clusterService.state()).thenReturn(state); + return clusterService; + } + } + + static class FieldCapsClient extends FilterClient { + final Map aliases; + final Map> mappings; + + FieldCapsClient(ThreadPool threadPool, Map aliases, Map> mappings) { + super(new NoOpClient(threadPool)); + this.aliases = aliases; + this.mappings = mappings; + } + + @Override + @SuppressWarnings("unchecked") + protected void doExecute( + ActionType action, + Request transportRequest, + ActionListener listener + ) { + assertThat(transportRequest, instanceOf(FieldCapabilitiesRequest.class)); + FieldCapabilitiesRequest r = (FieldCapabilitiesRequest) transportRequest; + assertThat(r.indices(), arrayWithSize(1)); + String alias = aliases.get(r.indices()[0]); + assertNotNull(alias); + Map mapping = mappings.get(alias); + if (mapping != null) { + Map> fieldCaps = new HashMap<>(); + for (Map.Entry e : mapping.entrySet()) { + var f = new FieldCapabilities( + e.getKey(), + e.getValue(), + false, + false, + false, + true, + null, + new String[] { alias }, + null, + null, + null, + null, + Map.of() + ); + fieldCaps.put(e.getKey(), Map.of(e.getValue(), f)); + } + listener.onResponse((Response) new FieldCapabilitiesResponse(new String[] { alias }, fieldCaps)); + } else { + listener.onResponse((Response) new FieldCapabilitiesResponse(new String[0], Map.of())); + } + } + } +} diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsTests.java new file mode 100644 index 0000000000000..e1fd214b63b66 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InsensitiveEqualsTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.TestUtils; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; + +import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; + +public class InsensitiveEqualsTests extends ESTestCase { + + public void testFold() { + assertTrue(insensitiveEquals(l("foo"), l("foo")).fold()); + assertTrue(insensitiveEquals(l("Foo"), l("foo")).fold()); + assertTrue(insensitiveEquals(l("Foo"), l("fOO")).fold()); + assertTrue(insensitiveEquals(l("foo*"), l("foo*")).fold()); + assertTrue(insensitiveEquals(l("foo*"), l("FOO*")).fold()); + assertTrue(insensitiveEquals(l("foo?bar"), l("foo?bar")).fold()); + assertTrue(insensitiveEquals(l("foo?bar"), l("FOO?BAR")).fold()); + assertFalse(insensitiveEquals(l("Foo"), l("fo*")).fold()); + assertFalse(insensitiveEquals(l("Fox"), l("fo?")).fold()); + assertFalse(insensitiveEquals(l("Foo"), l("*OO")).fold()); + assertFalse(insensitiveEquals(l("BarFooBaz"), l("*O*")).fold()); + assertFalse(insensitiveEquals(l("BarFooBaz"), l("bar*baz")).fold()); + 
assertFalse(insensitiveEquals(l("foo"), l("*")).fold()); + + assertFalse(insensitiveEquals(l("foo*bar"), l("foo\\*bar")).fold()); + assertFalse(insensitiveEquals(l("foo?"), l("foo\\?")).fold()); + assertFalse(insensitiveEquals(l("foo?bar"), l("foo\\?bar")).fold()); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(10)), l("*")).fold()); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("???")).fold()); + + assertFalse(insensitiveEquals(l("foo"), l("bar")).fold()); + assertFalse(insensitiveEquals(l("foo"), l("ba*")).fold()); + assertFalse(insensitiveEquals(l("foo"), l("*a*")).fold()); + assertFalse(insensitiveEquals(l(""), l("bar")).fold()); + assertFalse(insensitiveEquals(l("foo"), l("")).fold()); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("??")).fold()); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("????")).fold()); + + assertNull(insensitiveEquals(l("foo"), Literal.NULL).fold()); + assertNull(insensitiveEquals(Literal.NULL, l("foo")).fold()); + assertNull(insensitiveEquals(Literal.NULL, Literal.NULL).fold()); + } + + public void testProcess() { + assertTrue(InsensitiveEquals.process(BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("foo"))); + assertTrue(InsensitiveEquals.process(BytesRefs.toBytesRef("Foo"), BytesRefs.toBytesRef("foo"))); + assertTrue(InsensitiveEquals.process(BytesRefs.toBytesRef("Foo"), BytesRefs.toBytesRef("fOO"))); + assertTrue(InsensitiveEquals.process(BytesRefs.toBytesRef("foo*"), BytesRefs.toBytesRef("foo*"))); + assertTrue(InsensitiveEquals.process(BytesRefs.toBytesRef("foo*"), BytesRefs.toBytesRef("FOO*"))); + assertTrue(InsensitiveEquals.process(BytesRefs.toBytesRef("foo?bar"), BytesRefs.toBytesRef("foo?bar"))); + assertTrue(InsensitiveEquals.process(BytesRefs.toBytesRef("foo?bar"), BytesRefs.toBytesRef("FOO?BAR"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("Foo"), BytesRefs.toBytesRef("fo*"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("Fox"), 
BytesRefs.toBytesRef("fo?"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("Foo"), BytesRefs.toBytesRef("*OO"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("BarFooBaz"), BytesRefs.toBytesRef("*O*"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("BarFooBaz"), BytesRefs.toBytesRef("bar*baz"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("*"))); + + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo*bar"), BytesRefs.toBytesRef("foo\\*bar"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo?"), BytesRefs.toBytesRef("foo\\?"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo?bar"), BytesRefs.toBytesRef("foo\\?bar"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef(randomAlphaOfLength(10)), BytesRefs.toBytesRef("*"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef(randomAlphaOfLength(3)), BytesRefs.toBytesRef("???"))); + + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("ba*"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("*a*"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef(""), BytesRefs.toBytesRef("bar"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef(""))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef(randomAlphaOfLength(3)), BytesRefs.toBytesRef("??"))); + assertFalse(InsensitiveEquals.process(BytesRefs.toBytesRef(randomAlphaOfLength(3)), BytesRefs.toBytesRef("????"))); + } + + protected InsensitiveEquals insensitiveEquals(Expression left, Expression right) { + return new InsensitiveEquals(EMPTY, left, right); + } + + private static Literal l(Object value) { + return TestUtils.of(EMPTY, value); + } +} diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index a311b1e40913b..dded86fdd8aee 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -37,6 +37,7 @@ import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.optimizer.FoldNull; import org.elasticsearch.xpack.esql.parser.ExpressionBuilder; @@ -127,8 +128,10 @@ public static Literal randomLiteral(DataType type) { case "time_duration" -> Duration.ofMillis(randomLongBetween(-604800000L, 604800000L)); // plus/minus 7 days case "text" -> new BytesRef(randomAlphaOfLength(50)); case "version" -> randomVersion().toBytesRef(); - case "geo_point" -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()); - case "cartesian_point" -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()); + case "geo_point" -> GEO.asWkb(GeometryTestUtils.randomPoint()); + case "cartesian_point" -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()); + case "geo_shape" -> GEO.asWkb(GeometryTestUtils.randomGeometry(randomBoolean())); + case "cartesian_shape" -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean())); case "null" -> null; case "_source" -> { try { @@ -470,9 +473,11 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert // Note: the null-in-fast-null-out handling prevents any exception 
from being thrown, so the warnings provided in some test // cases won't actually be registered. This isn't an issue for unary functions, but could be an issue for n-ary ones, if // function processing of the first parameter(s) could raise an exception/warning. (But hasn't been the case so far.) - // For n-ary functions, dealing with one multivalue (before hitting the null parameter injected above) will now trigger + // N-ary non-MV functions dealing with one multivalue (before hitting the null parameter injected above) will now trigger // a warning ("SV-function encountered a MV") that thus needs to be checked. - if (simpleData.stream().anyMatch(List.class::isInstance) && testCase.getExpectedWarnings() != null) { + if (this instanceof AbstractMultivalueFunctionTestCase == false + && simpleData.stream().anyMatch(List.class::isInstance) + && testCase.getExpectedWarnings() != null) { assertWarnings(testCase.getExpectedWarnings()); } } @@ -538,17 +543,22 @@ public final void testFold() { return; } assertFalse(expression.typeResolved().unresolved()); - expression = new FoldNull().rule(expression); - assertThat(expression.dataType(), equalTo(testCase.expectedType)); - assertTrue(expression.foldable()); - Object result = expression.fold(); - // Decode unsigned longs into BigIntegers - if (testCase.expectedType == DataTypes.UNSIGNED_LONG && result != null) { - result = NumericUtils.unsignedLongAsBigInteger((Long) result); - } - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); + Expression nullOptimized = new FoldNull().rule(expression); + assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType)); + assertTrue(nullOptimized.foldable()); + if (testCase.foldingExceptionClass() == null) { + Object result = nullOptimized.fold(); + // Decode unsigned longs into BigIntegers + if (testCase.expectedType == DataTypes.UNSIGNED_LONG && result != null) { + result = 
NumericUtils.unsignedLongAsBigInteger((Long) result); + } + assertThat(result, testCase.getMatcher()); + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } else { + Throwable t = expectThrows(testCase.foldingExceptionClass(), nullOptimized::fold); + assertThat(t.getMessage(), equalTo(testCase.foldingExceptionMessage())); } } @@ -588,6 +598,8 @@ public static void testFunctionInfo() { EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition); List args = description.args(); + assertTrue("expect description to be defined", description.description() != null && description.description().length() > 0); + List> typesFromSignature = new ArrayList<>(); Set returnFromSignature = new HashSet<>(); for (int i = 0; i < args.size(); i++) { @@ -603,21 +615,16 @@ public static void testFunctionInfo() { for (int i = 0; i < args.size(); i++) { Set annotationTypes = Arrays.stream(args.get(i).type()).collect(Collectors.toCollection(() -> new TreeSet<>())); - if (annotationTypes.equals(Set.of("?"))) { - continue; // TODO remove this eventually, so that all the functions will have to provide signature info - } Set signatureTypes = typesFromSignature.get(i); if (signatureTypes.isEmpty()) { continue; } - assertEquals(annotationTypes, signatureTypes); + assertEquals(signatureTypes, annotationTypes); } Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(() -> new TreeSet<>())); - if (returnTypes.equals(Set.of("?")) == false) { - // TODO remove this eventually, so that all the functions will have to provide signature info - assertEquals(returnTypes, returnFromSignature); - } + assertEquals(returnFromSignature, returnTypes); + } /** @@ -952,7 +959,15 @@ protected static String typeErrorMessage(boolean includeOrdinal, List expectedValue.apply((BytesRef) n), warnings); } + /** + * Generate positive test cases for a unary function operating on an {@link 
EsqlDataTypes#GEO_SHAPE}. + */ + public static void forUnaryGeoShape( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + unary(suppliers, expectedEvaluatorToString, geoShapeCases(), expectedType, n -> expectedValue.apply((BytesRef) n), warnings); + } + + /** + * Generate positive test cases for a unary function operating on an {@link EsqlDataTypes#CARTESIAN_SHAPE}. + */ + public static void forUnaryCartesianShape( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + unary(suppliers, expectedEvaluatorToString, cartesianShapeCases(), expectedType, n -> expectedValue.apply((BytesRef) n), warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#IP}. */ @@ -913,17 +939,31 @@ public static List timeDurationCases() { } private static List geoPointCases() { + return List.of(new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint()), EsqlDataTypes.GEO_POINT)); + } + + private static List cartesianPointCases() { return List.of( - new TypedDataSupplier("", () -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()), EsqlDataTypes.GEO_POINT) + new TypedDataSupplier("", () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()), EsqlDataTypes.CARTESIAN_POINT) ); } - private static List cartesianPointCases() { + private static List geoShapeCases() { + return List.of( + new TypedDataSupplier( + "", + () -> GEO.asWkb(GeometryTestUtils.randomGeometry(ESTestCase.randomBoolean())), + EsqlDataTypes.GEO_SHAPE + ) + ); + } + + private static List cartesianShapeCases() { return List.of( new TypedDataSupplier( - "", - () -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()), - EsqlDataTypes.CARTESIAN_POINT + "", + () -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(ESTestCase.randomBoolean())), + EsqlDataTypes.CARTESIAN_SHAPE ) ); } @@ -1082,6 +1122,9 @@ public static class TestCase { 
*/ private String[] expectedWarnings; + private Class foldingExceptionClass; + private String foldingExceptionMessage; + private final String expectedTypeError; private final boolean allTypesAreRepresentable; @@ -1147,6 +1190,14 @@ public String[] getExpectedWarnings() { return expectedWarnings; } + public Class foldingExceptionClass() { + return foldingExceptionClass; + } + + public String foldingExceptionMessage() { + return foldingExceptionMessage; + } + public String getExpectedTypeError() { return expectedTypeError; } @@ -1161,6 +1212,12 @@ public TestCase withWarning(String warning) { } return new TestCase(data, evaluatorToString, expectedType, matcher, newWarnings, expectedTypeError); } + + public TestCase withFoldingException(Class clazz, String message) { + foldingExceptionClass = clazz; + foldingExceptionMessage = message; + return this; + } } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java index 3144cc4e6940a..88910320c962e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -46,7 +46,7 @@ public static Iterable parameters() { EsqlDataTypes.CARTESIAN_POINT, bytesRef -> null, bytesRef -> { - var exception = expectThrows(Exception.class, () -> CARTESIAN.stringAsWKB(bytesRef.utf8ToString())); + var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", "Line -1:-1: " + exception @@ -60,12 +60,12 @@ public static Iterable parameters() { List.of( new TestCaseSupplier.TypedDataSupplier( "", - () -> new BytesRef(CARTESIAN.pointAsString(ShapeTestUtils.randomPoint())), + () -> new BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomPoint())), DataTypes.KEYWORD ) ), EsqlDataTypes.CARTESIAN_POINT, - bytesRef -> CARTESIAN.stringAsWKB(((BytesRef) bytesRef).utf8ToString()), + bytesRef -> CARTESIAN.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java new file mode 100644 index 0000000000000..117968de5148f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; + +public class ToCartesianShapeTests extends AbstractFunctionTestCase { + public ToCartesianShapeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + final String attribute = "Attribute[channel=0]"; + final Function evaluatorName = s -> "ToCartesianShape" + s + "Evaluator[field=" + attribute + "]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryCartesianPoint(suppliers, attribute, EsqlDataTypes.CARTESIAN_SHAPE, v -> v, List.of()); + TestCaseSupplier.forUnaryCartesianShape(suppliers, attribute, EsqlDataTypes.CARTESIAN_SHAPE, v -> v, List.of()); + // random strings that don't look like a cartesian shape + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("FromString"), + EsqlDataTypes.CARTESIAN_SHAPE, + bytesRef -> null, + bytesRef -> { + var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result 
as null. Only first 20 failures recorded.", + "Line -1:-1: " + exception + ); + } + ); + // strings that are cartesian_shape representations + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("FromString"), + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + () -> new BytesRef(CARTESIAN.asWkt(GeometryTestUtils.randomGeometry(randomBoolean()))), + DataTypes.KEYWORD + ) + ), + EsqlDataTypes.CARTESIAN_SHAPE, + bytesRef -> CARTESIAN.wktToWkb(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToCartesianShape(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java index 9c1a2b3002ec4..4a5534e1d5d1a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java @@ -46,7 +46,7 @@ public static Iterable parameters() { EsqlDataTypes.GEO_POINT, bytesRef -> null, bytesRef -> { - var exception = expectThrows(Exception.class, () -> GEO.stringAsWKB(bytesRef.utf8ToString())); + var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", "Line -1:-1: " + exception @@ -60,12 +60,12 @@ public static Iterable parameters() { List.of( new TestCaseSupplier.TypedDataSupplier( "", - () -> new BytesRef(GEO.pointAsString(GeometryTestUtils.randomPoint())), + () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomPoint())), DataTypes.KEYWORD ) ), EsqlDataTypes.GEO_POINT, - bytesRef -> GEO.stringAsWKB(((BytesRef) bytesRef).utf8ToString()), + bytesRef -> GEO.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java new file mode 100644 index 0000000000000..15db74d71d21f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + +public class ToGeoShapeTests extends AbstractFunctionTestCase { + public ToGeoShapeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + final String attribute = "Attribute[channel=0]"; + final Function evaluatorName = s -> "ToGeoShape" + s + "Evaluator[field=" + attribute + "]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryGeoPoint(suppliers, attribute, EsqlDataTypes.GEO_SHAPE, v -> v, List.of()); + TestCaseSupplier.forUnaryGeoShape(suppliers, attribute, EsqlDataTypes.GEO_SHAPE, v -> v, List.of()); + // random strings that don't look like a geo shape + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("FromString"), + EsqlDataTypes.GEO_SHAPE, + bytesRef -> null, + bytesRef -> { + var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: " + exception + ); + } + ); + // strings that are geo_shape representations + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("FromString"), + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomGeometry(randomBoolean()))), + DataTypes.KEYWORD + ) + ), + EsqlDataTypes.GEO_SHAPE, + bytesRef -> GEO.wktToWkb(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToGeoShape(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java index 1c2488c8e9cb5..030c219b75e2f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -24,9 +24,6 @@ import java.util.function.Function; import java.util.function.Supplier; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; - public class ToLongTests extends AbstractFunctionTestCase { public ToLongTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -43,15 +40,6 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.LONG, b -> b ? 
1L : 0L, List.of()); - // geo types - TestCaseSupplier.forUnaryGeoPoint(suppliers, evaluatorName.apply("GeoPoint"), DataTypes.LONG, GEO::wkbAsLong, List.of()); - TestCaseSupplier.forUnaryCartesianPoint( - suppliers, - evaluatorName.apply("CartesianPoint"), - DataTypes.LONG, - CARTESIAN::wkbAsLong, - List.of() - ); // datetimes TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.LONG, Instant::toEpochMilli, List.of()); // random strings that don't look like a long diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 46721c190c7b6..9d5eed2ca2ebe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -91,14 +91,28 @@ public static Iterable parameters() { suppliers, "ToStringFromGeoPointEvaluator[field=" + read + "]", DataTypes.KEYWORD, - wkb -> new BytesRef(GEO.wkbAsString(wkb)), + wkb -> new BytesRef(GEO.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryCartesianPoint( suppliers, "ToStringFromCartesianPointEvaluator[field=" + read + "]", DataTypes.KEYWORD, - wkb -> new BytesRef(CARTESIAN.wkbAsString(wkb)), + wkb -> new BytesRef(CARTESIAN.wkbToWkt(wkb)), + List.of() + ); + TestCaseSupplier.forUnaryGeoShape( + suppliers, + "ToStringFromGeoShapeEvaluator[field=" + read + "]", + DataTypes.KEYWORD, + wkb -> new BytesRef(GEO.wkbToWkt(wkb)), + List.of() + ); + TestCaseSupplier.forUnaryCartesianShape( + suppliers, + "ToStringFromCartesianShapeEvaluator[field=" + read + "]", + DataTypes.KEYWORD, + wkb -> new BytesRef(CARTESIAN.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryIp( diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 1446fc54c99fa..3a6a5d8eabae3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -30,6 +30,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class DateExtractTests extends AbstractScalarFunctionTestCase { public DateExtractTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -38,17 +39,39 @@ public DateExtractTests(@Name("TestCase") Supplier te @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Date Extract Year", () -> { - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("YEAR"), DataTypes.KEYWORD, "field"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.KEYWORD, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + ), + "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataTypes.LONG, + equalTo(2023L) + ) ), - "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataTypes.LONG, - equalTo(2023L) - ); - }))); + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + 
List.of( + new TestCaseSupplier.TypedData(new BytesRef("not a unit"), DataTypes.KEYWORD, "chrono"), + new TestCaseSupplier.TypedData(0L, DataTypes.DATETIME, "date") + + ), + "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataTypes.LONG, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.lang.IllegalArgumentException: No enum constant java.time.temporal.ChronoField.NOT A UNIT" + ) + .withFoldingException(InvalidArgumentException.class, "invalid date field for []: not a unit") + ) + ) + ); } public void testAllChronoFields() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index 48d8079ace477..ae53f2e81d158 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -25,6 +25,8 @@ import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; public class DateParseTests extends AbstractScalarFunctionTestCase { @@ -59,6 +61,43 @@ public static Iterable parameters() { DataTypes.DATETIME, equalTo(1683244800000L) ) + ), + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("not a format"), DataTypes.KEYWORD, "second"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.KEYWORD, "first") + + ), + "DateParseEvaluator[val=Attribute[channel=1], 
formatter=Attribute[channel=0], zoneId=Z]", + DataTypes.DATETIME, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.lang.IllegalArgumentException: Invalid format: [not a format]: Unknown pattern letter: o" + ) + .withFoldingException( + InvalidArgumentException.class, + "invalid date pattern for []: Invalid format: [not a format]: Unknown pattern letter: o" + ) + ), + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.KEYWORD, "second"), + new TestCaseSupplier.TypedData(new BytesRef("not a date"), DataTypes.KEYWORD, "first") + + ), + "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", + DataTypes.DATETIME, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.lang.IllegalArgumentException: " + + "failed to parse date field [not a date] with format [yyyy-MM-dd]" + ) ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java index 043bf083b580a..013753c801c39 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java @@ -84,8 +84,8 @@ private Expression build(Source source, Expression arg) { Literal from; Literal to; if (arg.dataType() == DataTypes.DATETIME) { - from = new Literal(Source.EMPTY, new BytesRef("2023-02-01T00:00:00.00Z"), DataTypes.KEYWORD); - to = new Literal(Source.EMPTY, new BytesRef("2023-03-01T00:00:00.00Z"), DataTypes.KEYWORD); + from = stringOrDateTime("2023-02-01T00:00:00.00Z"); + to = stringOrDateTime("2023-03-01T09:00:00.00Z"); } else { from = new Literal(Source.EMPTY, 0, DataTypes.DOUBLE); to = new Literal(Source.EMPTY, 1000, DataTypes.DOUBLE); @@ -93,6 +93,13 @@ private Expression build(Source source, Expression arg) { return new AutoBucket(source, arg, new Literal(Source.EMPTY, 50, DataTypes.INTEGER), from, to); } + private Literal stringOrDateTime(String date) { + if (randomBoolean()) { + return new Literal(Source.EMPTY, new BytesRef(date), randomBoolean() ? 
DataTypes.KEYWORD : DataTypes.TEXT); + } + return new Literal(Source.EMPTY, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date), DataTypes.DATETIME); + } + @Override protected DataType expectedType(List argTypes) { if (argTypes.get(0).isNumeric()) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index 6f0a2edafaf04..fbe146c66f27d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -11,7 +11,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; -import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -415,7 +415,7 @@ protected static void geoPoints( DataType expectedDataType, BiFunction, Matcher> matcher ) { - points(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, expectedDataType, GEO, GeometryTestUtils::randomPoint, matcher); + spatial(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, expectedDataType, GEO, GeometryTestUtils::randomPoint, matcher); } /** @@ -443,7 +443,7 @@ protected static void cartesianPoints( DataType expectedDataType, BiFunction, Matcher> matcher ) { - points( + spatial( cases, name, evaluatorName, @@ -456,20 +456,68 @@ protected static void cartesianPoints( } /** - * Build many test cases with either {@code 
geo_point} or {@code cartesian_point} values. + * Build many test cases with {@code geo_shape} values that are converted to another type. + * This assumes that the function consumes {@code geo_shape} values and produces another type. + * For example, mv_count() can consume geo_shapes and produce an integer count. */ - protected static void points( + protected static void geoShape( + List cases, + String name, + String evaluatorName, + DataType expectedDataType, + BiFunction, Matcher> matcher + ) { + spatial( + cases, + name, + evaluatorName, + EsqlDataTypes.GEO_SHAPE, + expectedDataType, + GEO, + () -> rarely() ? GeometryTestUtils.randomGeometry(randomBoolean()) : GeometryTestUtils.randomPoint(), + matcher + ); + } + + /** + * Build many test cases with {@code cartesian_shape} values that are converted to another type. + * This assumes that the function consumes {@code cartesian_shape} values and produces another type. + * For example, mv_count() can consume cartesian shapes and produce an integer count. + */ + protected static void cartesianShape( + List cases, + String name, + String evaluatorName, + DataType expectedDataType, + BiFunction, Matcher> matcher + ) { + spatial( + cases, + name, + evaluatorName, + EsqlDataTypes.CARTESIAN_SHAPE, + expectedDataType, + CARTESIAN, + () -> rarely() ? 
ShapeTestUtils.randomGeometry(randomBoolean()) : ShapeTestUtils.randomPoint(), + matcher + ); + } + + /** + * Build many test cases for spatial values + */ + protected static void spatial( List cases, String name, String evaluatorName, DataType dataType, DataType expectedDataType, SpatialCoordinateTypes spatial, - Supplier randomPoint, + Supplier randomGeometry, BiFunction, Matcher> matcher ) { cases.add(new TestCaseSupplier(name + "(" + dataType.typeName() + ")", List.of(dataType), () -> { - BytesRef wkb = spatial.pointAsWKB(randomPoint.get()); + BytesRef wkb = spatial.asWkb(randomGeometry.get()); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(List.of(wkb), dataType, "field")), evaluatorName + "[field=Attribute[channel=0]]", @@ -479,7 +527,7 @@ protected static void points( })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { cases.add(new TestCaseSupplier(name + "(<" + dataType.typeName() + "s>) " + ordering, List.of(dataType), () -> { - List mvData = randomList(1, 100, () -> spatial.pointAsWKB(randomPoint.get())); + List mvData = randomList(1, 100, () -> spatial.asWkb(randomGeometry.get())); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(mvData, dataType, "field")), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java index 1abbd62faa0bd..342baf405d0c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java @@ -39,6 +39,8 @@ public static Iterable parameters() { dateTimes(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> 
equalTo(Math.toIntExact(values.count()))); geoPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); cartesianPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + geoShape(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + cartesianShape(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, cases))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java index 91c30b7c1f566..0f52efe20399e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java @@ -41,6 +41,8 @@ public static Iterable parameters() { dateTimes(cases, "mv_first", "MvFirst", DataTypes.DATETIME, (size, values) -> equalTo(values.findFirst().getAsLong())); geoPoints(cases, "mv_first", "MvFirst", EsqlDataTypes.GEO_POINT, (size, values) -> equalTo(values.findFirst().get())); cartesianPoints(cases, "mv_first", "MvFirst", EsqlDataTypes.CARTESIAN_POINT, (size, values) -> equalTo(values.findFirst().get())); + geoShape(cases, "mv_first", "MvFirst", EsqlDataTypes.GEO_SHAPE, (size, values) -> equalTo(values.findFirst().get())); + cartesianShape(cases, "mv_first", "MvFirst", EsqlDataTypes.CARTESIAN_SHAPE, (size, values) -> equalTo(values.findFirst().get())); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java index 7577cbf7dd0a8..41abab22c72ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java @@ -47,6 +47,14 @@ public static Iterable parameters() { EsqlDataTypes.CARTESIAN_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get()) ); + geoShape(cases, "mv_last", "MvLast", EsqlDataTypes.GEO_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + cartesianShape( + cases, + "mv_last", + "MvLast", + EsqlDataTypes.CARTESIAN_SHAPE, + (size, values) -> equalTo(values.reduce((f, s) -> s).get()) + ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index da6a7aec8462c..90b1bc22c45e4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -10,16 +10,22 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import 
org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class MvSumTests extends AbstractMultivalueFunctionTestCase { public MvSumTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -34,9 +40,50 @@ public static Iterable parameters() { // ints(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); // longs(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); // unsignedLongAsBigInteger(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); + + cases.add(arithmeticExceptionCase(DataTypes.INTEGER, () -> { + List data = randomList(1, 10, () -> randomIntBetween(0, Integer.MAX_VALUE)); + data.add(Integer.MAX_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.INTEGER, () -> { + List data = randomList(1, 10, () -> randomIntBetween(Integer.MIN_VALUE, 0)); + data.add(Integer.MIN_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.LONG, () -> { + List data = randomList(1, 10, () -> randomLongBetween(0L, Long.MAX_VALUE)); + data.add(Long.MAX_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.LONG, () -> { + List data = randomList(1, 10, () -> randomLongBetween(Long.MIN_VALUE, 0L)); + data.add(Long.MIN_VALUE); + return data; + })); + cases.add(arithmeticExceptionCase(DataTypes.UNSIGNED_LONG, () -> { + List data = randomList(1, 10, ESTestCase::randomLong); + data.add(asLongUnsigned(UNSIGNED_LONG_MAX)); + return data; + })); return parameterSuppliersFromTypedData(cases); } + private static TestCaseSupplier arithmeticExceptionCase(DataType dataType, Supplier dataSupplier) { + String typeNameOverflow = 
dataType.typeName().toLowerCase(Locale.ROOT) + " overflow"; + return new TestCaseSupplier( + "<" + typeNameOverflow + ">", + List.of(dataType), + () -> new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(dataSupplier.get(), dataType, "field")), + "MvSum[field=Attribute[channel=0]]", + dataType, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.ArithmeticException: " + typeNameOverflow) + ); + } + @Override protected Expression build(Source source, Expression field) { return new MvSum(source, field); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java index 63e70b6612470..bc94ab39abccb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java @@ -104,6 +104,21 @@ public static Iterable parameters() { equalTo(str.endsWith(suffix)) ); })); + suppliers.add(new TestCaseSupplier("ends_with with text args", () -> { + String str = randomAlphaOfLength(5); + String suffix = randomAlphaOfLength(1); + str = str + suffix; + + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.TEXT, "suffix") + ), + "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", + DataTypes.BOOLEAN, + equalTo(str.endsWith(suffix)) + ); + })); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java index 316bb679f2b70..6c3727455bbf1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -154,6 +154,19 @@ public static Iterable parameters() { equalTo(new BytesRef("")) ); })); + suppliers.add(new TestCaseSupplier("ascii as text input", () -> { + String text = randomAlphaOfLengthBetween(1, 64); + int length = between(1, text.length()); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + ), + "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(unicodeLeftSubstring(text, length))) + ); + })); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java index c6eb2d1f2a2c0..e60fbd70d241f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import org.hamcrest.Matcher; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -32,7 +33,8 @@ public LengthTests(@Name("TestCase") Supplier testCas @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("length basic test", () -> { 
+ List cases = new ArrayList<>(); + cases.addAll(List.of(new TestCaseSupplier("length basic test", () -> { BytesRef value = new BytesRef(randomAlphaOfLength(between(0, 10000))); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(value, DataTypes.KEYWORD, "f")), @@ -40,23 +42,37 @@ public static Iterable parameters() { DataTypes.INTEGER, equalTo(UnicodeUtil.codePointCount(value)) ); - }), - new TestCaseSupplier("empty string", () -> makeTestCase("", 0)), - new TestCaseSupplier("single ascii character", () -> makeTestCase("a", 1)), - new TestCaseSupplier("ascii string", () -> makeTestCase("clump", 5)), - new TestCaseSupplier("3 bytes, 1 code point", () -> makeTestCase("☕", 1)), - new TestCaseSupplier("6 bytes, 2 code points", () -> makeTestCase("❗️", 2)), - new TestCaseSupplier("100 random alpha", () -> makeTestCase(randomAlphaOfLength(100), 100)), - new TestCaseSupplier("100 random code points", () -> makeTestCase(randomUnicodeOfCodepointLength(100), 100)) - )); + }))); + cases.addAll(makeTestCases("empty string", () -> "", 0)); + cases.addAll(makeTestCases("single ascii character", () -> "a", 1)); + cases.addAll(makeTestCases("ascii string", () -> "clump", 5)); + cases.addAll(makeTestCases("3 bytes, 1 code point", () -> "☕", 1)); + cases.addAll(makeTestCases("6 bytes, 2 code points", () -> "❗️", 2)); + cases.addAll(makeTestCases("100 random alpha", () -> randomAlphaOfLength(100), 100)); + cases.addAll(makeTestCases("100 random code points", () -> randomUnicodeOfCodepointLength(100), 100)); + return parameterSuppliersFromTypedData(cases); } - private static TestCaseSupplier.TestCase makeTestCase(String text, int expectedLength) { - return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "f")), - "LengthEvaluator[val=Attribute[channel=0]]", - DataTypes.INTEGER, - equalTo(expectedLength) + private static List makeTestCases(String title, Supplier text, int expectedLength) { + return 
List.of( + new TestCaseSupplier( + title + " with keyword", + () -> new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataTypes.KEYWORD, "f")), + "LengthEvaluator[val=Attribute[channel=0]]", + DataTypes.INTEGER, + equalTo(expectedLength) + ) + ), + new TestCaseSupplier( + title + " with text", + () -> new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataTypes.TEXT, "f")), + "LengthEvaluator[val=Attribute[channel=0]]", + DataTypes.INTEGER, + equalTo(expectedLength) + ) + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java index 6e57a69720ca5..6c6500bfc333d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java @@ -22,6 +22,7 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +import java.util.regex.PatternSyntaxException; import static org.hamcrest.Matchers.equalTo; @@ -79,24 +80,31 @@ public static Iterable parameters() { ) ); - // a syntactically wrong regex should yield null. And a warning header - // but for now we are letting the exception pass through. 
See also https://github.com/elastic/elasticsearch/issues/100038 - // suppliers.add(new TestCaseSupplier("invalid_regex", () -> { - // String text = randomAlphaOfLength(10); - // String invalidRegex = "["; - // String newStr = randomAlphaOfLength(5); - // return new TestCaseSupplier.TestCase( - // List.of( - // new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - // new TestCaseSupplier.TypedData(new BytesRef(invalidRegex), DataTypes.KEYWORD, "oldStr"), - // new TestCaseSupplier.TypedData(new BytesRef(newStr), DataTypes.KEYWORD, "newStr") - // ), - // "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", - // DataTypes.KEYWORD, - // equalTo(null) - // ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - // .withWarning("java.util.regex.PatternSyntaxException: Unclosed character class near index 0\r\n[\r\n^"); - // })); + suppliers.add(new TestCaseSupplier("syntax error", List.of(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + String text = randomAlphaOfLength(10); + String invalidRegex = "["; + String newStr = randomAlphaOfLength(5); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(invalidRegex), DataTypes.KEYWORD, "oldStr"), + new TestCaseSupplier.TypedData(new BytesRef(newStr), DataTypes.KEYWORD, "newStr") + ), + "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(null) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.util.regex.PatternSyntaxException: Unclosed character class near index 0\n[\n^".replaceAll( + "\n", + System.lineSeparator() + ) + ) + .withFoldingException( + PatternSyntaxException.class, + "Unclosed character class near index 0\n[\n^".replaceAll("\n", System.lineSeparator()) + ); + })); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java index 0eeb312512b30..500580585ff90 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java @@ -154,7 +154,19 @@ public static Iterable parameters() { equalTo(new BytesRef("")) ); })); - + suppliers.add(new TestCaseSupplier("ascii as text", () -> { + String text = randomAlphaOfLengthBetween(1, 64); + int length = between(1, text.length()); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + ), + "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(unicodeRightSubstring(text, length))) + ); + })); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index e0611c7125e6e..56793bd1730d2 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -57,6 +57,22 @@ public static Iterable parameters() { DataTypes.KEYWORD, equalTo(strings.size() == 1 ? strings.get(0) : strings) ); + }), new TestCaseSupplier("split basic test with text input", () -> { + String delimiter = randomAlphaOfLength(1); + List strings = IntStream.range(0, between(1, 5)) + .mapToObj(i -> randomValueOtherThanMany(s -> s.contains(delimiter), () -> randomAlphaOfLength(4))) + .map(BytesRef::new) + .collect(Collectors.toList()); + String str = strings.stream().map(BytesRef::utf8ToString).collect(joining(delimiter)); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(delimiter), DataTypes.TEXT, "delim") + ), + "SplitVariableEvaluator[str=Attribute[channel=0], delim=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(strings.size() == 1 ? 
strings.get(0) : strings) + ); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java index 6eacea1d02987..961e27eea36c4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java @@ -46,6 +46,21 @@ public static Iterable parameters() { DataTypes.BOOLEAN, equalTo(str.startsWith(prefix)) ); + }), new TestCaseSupplier("Starts with basic test with text args", () -> { + String str = randomAlphaOfLength(5); + String prefix = randomAlphaOfLength(5); + if (randomBoolean()) { + str = prefix + str; + } + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(prefix), DataTypes.TEXT, "prefix") + ), + "StartsWithEvaluator[str=Attribute[channel=0], prefix=Attribute[channel=1]]", + DataTypes.BOOLEAN, + equalTo(str.startsWith(prefix)) + ); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index fd9cb29ec62c4..8dbc9eaeeccd6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -50,6 +50,20 @@ public static Iterable parameters() { DataTypes.KEYWORD, equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) ); + }), new TestCaseSupplier("Substring basic test with text 
input", () -> { + int start = between(1, 8); + int length = between(1, 10 - start); + String text = randomAlphaOfLength(10); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), + new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), + new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "end") + ), + "SubstringEvaluator[str=Attribute[channel=0], start=Attribute[channel=1], length=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) + ); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java new file mode 100644 index 0000000000000..bd9205c930d51 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; +import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.type.DateUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class ToLowerTests extends AbstractFunctionTestCase { + public ToLowerTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + + suppliers.add(supplier("keyword ascii", DataTypes.KEYWORD, () -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("keyword unicode", DataTypes.KEYWORD, () -> randomUnicodeOfLengthBetween(1, 10))); + suppliers.add(supplier("text ascii", DataTypes.TEXT, () -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("text unicode", DataTypes.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); + + // add null as parameter + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); + } + + public void testRandomLocale() { + String testString = randomAlphaOfLength(10); + EsqlConfiguration cfg = randomLocaleConfig(); + ToLower func = new ToLower(Source.EMPTY, new Literal(Source.EMPTY, testString, DataTypes.KEYWORD), cfg); + assertThat(BytesRefs.toBytesRef(testString.toLowerCase(cfg.locale())), equalTo(func.fold())); + } + + private EsqlConfiguration randomLocaleConfig() { + return new EsqlConfiguration( + DateUtils.UTC, + randomLocale(random()), + null, + null, + new QueryPragmas(Settings.EMPTY), + EsqlPlugin.QUERY_RESULT_TRUNCATION_MAX_SIZE.getDefault(Settings.EMPTY), + EsqlPlugin.QUERY_RESULT_TRUNCATION_DEFAULT_SIZE.getDefault(Settings.EMPTY), + "", + false + ); + } + + @Override + protected Expression build(Source source, List args) { + return new ToLower(source, args.get(0), EsqlTestUtils.TEST_CFG); + } + + private static TestCaseSupplier supplier(String name, DataType type, Supplier valueSupplier) { + return new TestCaseSupplier(name, List.of(type), () -> { + List values = new ArrayList<>(); + String expectedToString = "ToLowerEvaluator[val=Attribute[channel=0], locale=en_US]"; + + String value = valueSupplier.get(); + values.add(new TestCaseSupplier.TypedData(new BytesRef(value), type, "0")); + + String expectedValue = value.toLowerCase(EsqlTestUtils.TEST_CFG.locale()); + return new TestCaseSupplier.TestCase(values, expectedToString, type, equalTo(new BytesRef(expectedValue))); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java new file mode 100644 index 0000000000000..ce7c011f201d8 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java @@ -0,0 +1,91 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; +import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.type.DateUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class ToUpperTests extends AbstractFunctionTestCase { + public ToUpperTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + + suppliers.add(supplier("keyword ascii", DataTypes.KEYWORD, () -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("keyword unicode", DataTypes.KEYWORD, () -> randomUnicodeOfLengthBetween(1, 10))); + suppliers.add(supplier("text ascii", DataTypes.TEXT, 
() -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("text unicode", DataTypes.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); + + // add null as parameter + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); + } + + public void testRandomLocale() { + String testString = randomAlphaOfLength(10); + EsqlConfiguration cfg = randomLocaleConfig(); + ToUpper func = new ToUpper(Source.EMPTY, new Literal(Source.EMPTY, testString, DataTypes.KEYWORD), cfg); + assertThat(BytesRefs.toBytesRef(testString.toUpperCase(cfg.locale())), equalTo(func.fold())); + } + + private EsqlConfiguration randomLocaleConfig() { + return new EsqlConfiguration( + DateUtils.UTC, + randomLocale(random()), + null, + null, + new QueryPragmas(Settings.EMPTY), + EsqlPlugin.QUERY_RESULT_TRUNCATION_MAX_SIZE.getDefault(Settings.EMPTY), + EsqlPlugin.QUERY_RESULT_TRUNCATION_DEFAULT_SIZE.getDefault(Settings.EMPTY), + "", + false + ); + } + + @Override + protected Expression build(Source source, List args) { + return new ToUpper(source, args.get(0), EsqlTestUtils.TEST_CFG); + } + + private static TestCaseSupplier supplier(String name, DataType type, Supplier valueSupplier) { + return new TestCaseSupplier(name, List.of(type), () -> { + List values = new ArrayList<>(); + String expectedToString = "ToUpperEvaluator[val=Attribute[channel=0], locale=en_US]"; + + String value = valueSupplier.get(); + values.add(new TestCaseSupplier.TypedData(new BytesRef(value), type, "0")); + + String expectedValue = value.toUpperCase(EsqlTestUtils.TEST_CFG.locale()); + return new TestCaseSupplier.TestCase(values, expectedToString, type, equalTo(new BytesRef(expectedValue))); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java index 49233a19114c8..02005d51c96d5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java @@ -17,10 +17,12 @@ import java.util.List; import java.util.Locale; +import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public abstract class AbstractArithmeticTestCase extends AbstractBinaryOperatorTestCase { protected Matcher resultMatcher(List data, DataType dataType) { @@ -119,4 +121,27 @@ protected DataType expectedType(DataType lhsType, DataType rhsType) { } throw new UnsupportedOperationException(); } + + static TestCaseSupplier arithmeticExceptionOverflowCase( + DataType dataType, + Supplier lhsSupplier, + Supplier rhsSupplier, + String evaluator + ) { + String typeNameOverflow = dataType.typeName().toLowerCase(Locale.ROOT) + " overflow"; + return new TestCaseSupplier( + "<" + typeNameOverflow + ">", + List.of(dataType), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(lhsSupplier.get(), dataType, "lhs"), + new TestCaseSupplier.TypedData(rhsSupplier.get(), dataType, "rhs") + ), + evaluator + "[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + dataType, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.ArithmeticException: " + typeNameOverflow) + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index a3a707e35f599..48e5bdf5333bf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -26,8 +26,10 @@ import java.util.Set; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.AbstractArithmeticTestCase.arithmeticExceptionOverflowCase; import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; import static org.elasticsearch.xpack.ql.type.DateUtils.asMillis; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -206,6 +208,47 @@ public static Iterable parameters() { ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); + // exact math arithmetic exceptions + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomIntBetween(1, Integer.MAX_VALUE), + () -> Integer.MAX_VALUE, + "AddIntsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomIntBetween(Integer.MIN_VALUE, -1), + () -> Integer.MIN_VALUE, + "AddIntsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomLongBetween(1L, Long.MAX_VALUE), + () -> Long.MAX_VALUE, + "AddLongsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomLongBetween(Long.MIN_VALUE, -1L), + () -> Long.MIN_VALUE, + "AddLongsEvaluator" + ) + ); + suppliers.add( + arithmeticExceptionOverflowCase( + DataTypes.UNSIGNED_LONG, + () -> asLongUnsigned(randomBigInteger()), + () -> asLongUnsigned(UNSIGNED_LONG_MAX), + "AddUnsignedLongsEvaluator" + ) + ); return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index 82b6bbda276b6..4aa8786f2cd69 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -10,15 +10,19 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import 
org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import java.math.BigInteger; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; import static org.hamcrest.Matchers.equalTo; @@ -95,6 +99,27 @@ public static Iterable parameters() { )); } + // run dedicated test to avoid the JVM optimized ArithmeticException that lacks a message + public void testDivisionByZero() { + DataType testCaseType = testCase.getData().get(0).type(); + List data = switch (testCaseType.typeName()) { + case "INTEGER" -> List.of(randomInt(), 0); + case "LONG" -> List.of(randomLong(), 0L); + case "UNSIGNED_LONG" -> List.of(randomLong(), ZERO_AS_UNSIGNED_LONG); + default -> null; + }; + if (data != null) { + var op = build(Source.EMPTY, field("lhs", testCaseType), field("rhs", testCaseType)); + try (Block block = evaluator(op).get(driverContext()).eval(row(data))) { + assertCriticalWarnings( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: / by zero" + ); + assertNull(toJavaObject(block, 0)); + } + } + } + @Override protected boolean rhsOk(Object o) { if (o instanceof Number n) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index 425ef2bb11a6b..5beaf0b782af7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -10,15 +10,19 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import java.math.BigInteger; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; import static org.hamcrest.Matchers.equalTo; @@ -95,6 +99,27 @@ public static Iterable parameters() { )); } + // run dedicated test to avoid the JVM optimized ArithmeticException that lacks a message + public void testDivisionByZero() { + DataType testCaseType = testCase.getData().get(0).type(); + List data = switch (testCaseType.typeName()) { + case 
"INTEGER" -> List.of(randomInt(), 0); + case "LONG" -> List.of(randomLong(), 0L); + case "UNSIGNED_LONG" -> List.of(randomLong(), ZERO_AS_UNSIGNED_LONG); + default -> null; + }; + if (data != null) { + var op = build(Source.EMPTY, field("lhs", testCaseType), field("rhs", testCaseType)); + try (Block block = evaluator(op).get(driverContext()).eval(row(data))) { + assertCriticalWarnings( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: / by zero" + ); + assertNull(toJavaObject(block, 0)); + } + } + } + @Override protected boolean rhsOk(Object o) { if (o instanceof Number n) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java index 5a621af8caf21..09d0ad9e095ee 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java @@ -19,6 +19,8 @@ import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.AbstractArithmeticTestCase.arithmeticExceptionOverflowCase; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.hamcrest.Matchers.equalTo; public class MulTests extends AbstractFunctionTestCase { @@ -66,20 +68,38 @@ public static Iterable parameters() { DataTypes.DOUBLE, equalTo(lhs * rhs) ); - })/*, new TestCaseSupplier("ULong * ULong", () -> { - // Ensure we don't have an overflow - long rhs = randomLongBetween(0, 1024); - long lhs = randomLongBetween(0, 1024); - BigInteger lhsBI = unsignedLongAsBigInteger(lhs); - BigInteger rhsBI = unsignedLongAsBigInteger(rhs); - return 
new TestCase( - Source.EMPTY, - List.of(new TypedData(lhs, DataTypes.UNSIGNED_LONG, "lhs"), new TypedData(rhs, DataTypes.UNSIGNED_LONG, "rhs")), - "MulUnsignedLongsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - equalTo(asLongUnsigned(lhsBI.multiply(rhsBI).longValue())) - ); - }) - */ + }), /* new TestCaseSupplier("ULong * ULong", () -> { + // Ensure we don't have an overflow + long rhs = randomLongBetween(0, 1024); + long lhs = randomLongBetween(0, 1024); + BigInteger lhsBI = unsignedLongAsBigInteger(lhs); + BigInteger rhsBI = unsignedLongAsBigInteger(rhs); + return new TestCase( + Source.EMPTY, + List.of(new TypedData(lhs, DataTypes.UNSIGNED_LONG, "lhs"), new TypedData(rhs, DataTypes.UNSIGNED_LONG, "rhs")), + "MulUnsignedLongsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + equalTo(asLongUnsigned(lhsBI.multiply(rhsBI).longValue())) + ); + }) + */ + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomBoolean() ? Integer.MIN_VALUE : Integer.MAX_VALUE, + () -> randomIntBetween(2, Integer.MAX_VALUE), + "MulIntsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomBoolean() ? 
Long.MIN_VALUE : Long.MAX_VALUE, + () -> randomLongBetween(2L, Long.MAX_VALUE), + "MulLongsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.UNSIGNED_LONG, + () -> asLongUnsigned(UNSIGNED_LONG_MAX), + () -> asLongUnsigned(randomLongBetween(-Long.MAX_VALUE, Long.MAX_VALUE)), + "MulUnsignedLongsEvaluator" + ) )); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index f3dfde482b77f..b4f7dc9fc0392 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -22,8 +22,10 @@ import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.AbstractArithmeticTestCase.arithmeticExceptionOverflowCase; import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; import static org.elasticsearch.xpack.ql.type.DateUtils.asMillis; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -150,7 +152,39 @@ public static Iterable parameters() { is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); - }))); + }), + // exact math arithmetic exceptions + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> Integer.MIN_VALUE, + () -> randomIntBetween(1, Integer.MAX_VALUE), + "SubIntsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.INTEGER, + () -> randomIntBetween(Integer.MIN_VALUE, -2), + () -> Integer.MAX_VALUE, + "SubIntsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> Long.MIN_VALUE, + () -> randomLongBetween(1L, Long.MAX_VALUE), + "SubLongsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.LONG, + () -> randomLongBetween(Long.MIN_VALUE, -2L), + () -> Long.MAX_VALUE, + "SubLongsEvaluator" + ), + arithmeticExceptionOverflowCase( + DataTypes.UNSIGNED_LONG, + () -> ZERO_AS_UNSIGNED_LONG, + () -> randomLongBetween(-Long.MAX_VALUE, Long.MAX_VALUE), + "SubUnsignedLongsEvaluator" + ) + )); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java index 37ab820146bf4..71aa945594584 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java @@ -94,8 +94,8 @@ protected final void validateType(BinaryOperator op, DataType lhsTyp equalTo( String.format( Locale.ROOT, - "first argument of [%s %s] must be [numeric, keyword, text, ip, datetime, version, geo_point or " - + "cartesian_point], found value [] type [%s]", + "first argument of [%s %s] must be [numeric, keyword, text, ip, 
datetime, version, geo_point, " + + "geo_shape, cartesian_point or cartesian_shape], found value [] type [%s]", lhsType.typeName(), rhsType.typeName(), lhsType.typeName() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index bbe32350a0465..8403dc3775dce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -260,8 +260,8 @@ private static EsqlQueryResponse regularData() { ); BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); - geoPoints.append(GEO.pointAsWKB(new Point(12, 56))); - geoPoints.append(GEO.pointAsWKB(new Point(-97, 26))); + geoPoints.append(GEO.asWkb(new Point(12, 56))); + geoPoints.append(GEO.asWkb(new Point(-97, 26))); // values List values = List.of( new Page( @@ -272,8 +272,8 @@ private static EsqlQueryResponse regularData() { blockFactory.newIntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock(), blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) + .appendBytesRef(CARTESIAN.asWkb(new Point(1234, 5678))) + .appendBytesRef(CARTESIAN.asWkb(new Point(-9753, 2611))) .build() ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index b8800713eca89..482ff84e1fd30 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -47,8 
+47,8 @@ public class TextFormatterTests extends ESTestCase { private static final BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); static { - geoPoints.append(GEO.pointAsWKB(new Point(12, 56))); - geoPoints.append(GEO.pointAsWKB(new Point(-97, 26))); + geoPoints.append(GEO.asWkb(new Point(12, 56))); + geoPoints.append(GEO.asWkb(new Point(-97, 26))); } EsqlQueryResponse esqlResponse = new EsqlQueryResponse( @@ -72,8 +72,8 @@ public class TextFormatterTests extends ESTestCase { ).asBlock(), blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) + .appendBytesRef(CARTESIAN.asWkb(new Point(1234, 5678))) + .appendBytesRef(CARTESIAN.asWkb(new Point(-9753, 2611))) .build(), blockFactory.newConstantNullBlock(2) ) @@ -146,8 +146,8 @@ public void testFormatWithoutHeader() { ).asBlock(), blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) - .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) + .appendBytesRef(CARTESIAN.asWkb(new Point(1234, 5678))) + .appendBytesRef(CARTESIAN.asWkb(new Point(-9753, 2611))) .build(), blockFactory.newConstantNullBlock(2) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 85612427a1867..6f3991a0e8323 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; @@ -604,6 +605,7 @@ static AggregateFunction randomAggFunction() { case 6 -> new MedianAbsoluteDeviation(Source.EMPTY, field); case 7 -> new CountDistinct(Source.EMPTY, field, right); case 8 -> new Percentile(Source.EMPTY, field, right); + case 9 -> new SpatialCentroid(Source.EMPTY, field); default -> throw new AssertionError(v); }; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index ac2426f485fcc..4b01a93b7e709 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -32,19 +33,24 @@ import org.elasticsearch.xpack.ql.plan.logical.Project; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; +import org.hamcrest.Matchers; import org.junit.BeforeClass; import java.util.List; +import java.util.Locale; import java.util.Map; +import static java.util.Collections.emptyMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.L; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_SEARCH_STATS; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForExistingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -299,6 +305,48 @@ public void testIsNotNullOnExpression() { var source = as(filter.child(), EsRelation.class); } + public void testSparseDocument() throws Exception { + var query = """ + from large + | keep field00* + | limit 10 + """; + + int size = 256; + Map large = Maps.newLinkedHashMapWithExpectedSize(size); + for (int i = 0; i < size; i++) { + var name = String.format(Locale.ROOT, "field%03d", i); + large.put(name, new EsField(name, DataTypes.INTEGER, emptyMap(), true, false)); + } + + SearchStats searchStats = statsForExistingField("field000", "field001", "field002", "field003", "field004"); + + EsIndex index = new EsIndex("large", large); + IndexResolution getIndexResult = IndexResolution.valid(index); + var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + + var analyzer = new Analyzer( + new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, EsqlTestUtils.emptyPolicyResolution()), + TEST_VERIFIER + ); + + var analyzed = analyzer.analyze(parser.createStatement(query)); + var optimized = logicalOptimizer.optimize(analyzed); + var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, searchStats); + var plan = new LocalLogicalPlanOptimizer(localContext).localOptimize(optimized); + + 
var project = as(plan, Project.class); + assertThat(project.projections(), hasSize(10)); + assertThat( + Expressions.names(project.projections()), + contains("field000", "field001", "field002", "field003", "field004", "field005", "field006", "field007", "field008", "field009") + ); + var eval = as(project.child(), Eval.class); + var field = eval.fields().get(0); + assertThat(Expressions.name(field), is("field005")); + assertThat(Alias.unwrap(field).fold(), Matchers.nullValue()); + } + private LocalRelation asEmptyRelation(Object o) { var empty = as(o, LocalRelation.class); assertThat(empty.supplier(), is(LocalSupplier.EMPTY)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 5887d61c652bb..9a558daea6de6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -21,15 +21,17 @@ import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.analysis.Verifier; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import 
org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; +import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -45,6 +47,7 @@ import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expressions; +import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; @@ -55,7 +58,6 @@ import java.util.List; import java.util.Map; -import java.util.Set; import static java.util.Arrays.asList; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; @@ -66,6 +68,7 @@ import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -133,25 +136,21 @@ public void init() { physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); - var enrichResolution = new EnrichResolution( - Set.of( - new EnrichPolicyResolution( - "foo", - new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("idx"), "fld", List.of("a", "b")), - IndexResolution.valid( - new EsIndex( - "idx", - Map.ofEntries( - Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), - Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) - ) - ) - ) + EnrichResolution 
enrichResolution = new EnrichResolution(); + enrichResolution.addResolvedPolicy( + "foo", + Enrich.Mode.ANY, + new ResolvedEnrichPolicy( + "fld", + EnrichPolicy.MATCH_TYPE, + List.of("a", "b"), + Map.of("", "idx"), + Map.ofEntries( + Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), + Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) ) - ), - Set.of("foo") + ) ); - analyzer = new Analyzer( new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), new Verifier(new Metrics()) @@ -383,28 +382,90 @@ public boolean exists(String field) { assertThat(Expressions.names(localSource.output()), contains("count", "seen")); } + /** + * Expects + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n + * ame{f}#7, long_noidx{f}#12, salary{f}#8]] + * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..] 
+ * \_EsQueryExec[test], query[{"exists":{"field":"emp_no","boost":1.0}}][_doc{f}#13], limit[500], sort[] estimatedRowSize[324] + */ public void testIsNotNullPushdownFilter() { var plan = plan("from test | where emp_no is not null"); var limit = as(plan, LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); - var query = as(exchange.child(), EsQueryExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); assertThat(query.limit().fold(), is(500)); var expected = QueryBuilders.existsQuery("emp_no"); assertThat(query.query().toString(), is(expected.toString())); } + /** + * Expects + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n + * ame{f}#7, long_noidx{f}#12, salary{f}#8]] + * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..] 
+ * \_EsQueryExec[test], query[{"bool":{"must_not":[{"exists":{"field":"emp_no","boost":1.0}}],"boost":1.0}}][_doc{f}#13], + * limit[500], sort[] estimatedRowSize[324] + */ public void testIsNullPushdownFilter() { var plan = plan("from test | where emp_no is null"); var limit = as(plan, LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); - var query = as(exchange.child(), EsQueryExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); assertThat(query.limit().fold(), is(500)); var expected = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("emp_no")); assertThat(query.query().toString(), is(expected.toString())); } + /** + * Expects + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[_meta_field{f}#8, emp_no{r}#2, first_name{r}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, first_n + * ame{r}#3 AS last_name, long_noidx{f}#11, emp_no{r}#2 AS salary]] + * \_FieldExtractExec[_meta_field{f}#8, gender{f}#4, job{f}#9, job.raw{f}..] 
+ * \_EvalExec[[null[INTEGER] AS emp_no, null[KEYWORD] AS first_name]] + * \_EsQueryExec[test], query[][_doc{f}#12], limit[500], sort[] estimatedRowSize[270] + */ + public void testMissingFieldsDoNotGetExtracted() { + var stats = EsqlTestUtils.statsForMissingField("first_name", "last_name", "emp_no", "salary"); + + var plan = plan("from test", stats); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var projections = project.projections(); + assertThat( + Expressions.names(projections), + contains("_meta_field", "emp_no", "first_name", "gender", "job", "job.raw", "languages", "last_name", "long_noidx", "salary") + ); + // emp_no + assertThat(projections.get(1), instanceOf(ReferenceAttribute.class)); + // first_name + assertThat(projections.get(2), instanceOf(ReferenceAttribute.class)); + + // last_name --> first_name + var nullAlias = Alias.unwrap(projections.get(7)); + assertThat(Expressions.name(nullAlias), is("first_name")); + // salary --> emp_no + nullAlias = Alias.unwrap(projections.get(9)); + assertThat(Expressions.name(nullAlias), is("emp_no")); + // check field extraction is skipped and that evaled fields are not extracted anymore + var field = as(project.child(), FieldExtractExec.class); + var fields = field.attributesToExtract(); + assertThat(Expressions.names(fields), contains("_meta_field", "gender", "job", "job.raw", "languages", "long_noidx")); + } + private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName, Source source) { return FilterTests.singleValueQuery(inner, fieldName, source); } @@ -431,15 +492,15 @@ private PhysicalPlan plan(String query, SearchStats stats) { private PhysicalPlan optimizedPlan(PhysicalPlan plan, SearchStats searchStats) { // System.out.println("* Physical Before\n" + plan); - var p = EstimatesRowSize.estimateRowSize(0, physicalPlanOptimizer.optimize(plan)); - // System.out.println("* Physical 
After\n" + p); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, physicalPlanOptimizer.optimize(plan)); + // System.out.println("* Physical After\n" + physicalPlan); // the real execution breaks the plan at the exchange and then decouples the plan // this is of no use in the unit tests, which checks the plan as a whole instead of each // individually hence why here the plan is kept as is var logicalTestOptimizer = new LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(config, searchStats)); var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer(new LocalPhysicalOptimizerContext(config, searchStats), true); - var l = PlannerUtils.localPlan(plan, logicalTestOptimizer, physicalTestOptimizer); + var l = PlannerUtils.localPlan(physicalPlan, logicalTestOptimizer, physicalTestOptimizer); // handle local reduction alignment l = PhysicalPlanOptimizerTests.localRelationshipAlignment(l); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 6320294d7ee54..ed3df60ecf13b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -15,8 +15,6 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; -import org.elasticsearch.xpack.esql.analysis.EnrichResolution; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ 
-37,6 +35,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.parser.EsqlParser; @@ -82,7 +81,6 @@ import java.util.List; import java.util.Map; -import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -94,10 +92,12 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.localSource; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.TestUtils.relation; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyArray; @@ -118,29 +118,35 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static Analyzer analyzer; private static LogicalPlanOptimizer logicalOptimizer; private static Map mapping; + private static Map mappingAirports; + private static Analyzer analyzerAirports; @BeforeClass public static void init() { parser = new EsqlParser(); - - mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping); - IndexResolution getIndexResult = IndexResolution.valid(test); - 
logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); - EnrichPolicyResolution policy = AnalyzerTestUtils.loadEnrichPolicyResolution( + var enrichResolution = AnalyzerTestUtils.loadEnrichPolicyResolution( "languages_idx", "id", "languages_idx", "mapping-languages.json" ); + + // Most tests used data from the test index, so we load it here, and use it in the plan() function. + mapping = loadMapping("mapping-basic.json"); + EsIndex test = new EsIndex("test", mapping); + IndexResolution getIndexResult = IndexResolution.valid(test); analyzer = new Analyzer( - new AnalyzerContext( - EsqlTestUtils.TEST_CFG, - new EsqlFunctionRegistry(), - getIndexResult, - new EnrichResolution(Set.of(policy), Set.of("languages_idx", "something")) - ), + new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), + TEST_VERIFIER + ); + + // Some tests use data from the airports index, so we load it here, and use it in the plan_airports() function. + mappingAirports = loadMapping("mapping-airports.json"); + EsIndex airports = new EsIndex("airports", mappingAirports); + IndexResolution getIndexResultAirports = IndexResolution.valid(airports); + analyzerAirports = new Analyzer( + new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResultAirports, enrichResolution), TEST_VERIFIER ); } @@ -2768,6 +2774,127 @@ public void testIsNotNullConstraintForAliasedExpressions() { var from = as(eval.child(), EsRelation.class); } + /** + * Expects + * Limit[500[INTEGER]] + * \_Aggregate[[],[SPATIALCENTROID(location{f}#9) AS centroid]] + * \_EsRelation[airports][abbrev{f}#5, location{f}#9, name{f}#6, scalerank{f}..] 
+ */ + public void testSpatialTypesAndStatsUseDocValues() { + var plan = planAirports(""" + from test + | stats centroid = st_centroid(location) + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(agg.aggregates()), contains("centroid")); + assertTrue("Expected GEO_POINT aggregation for STATS", agg.aggregates().stream().allMatch(aggExp -> { + var alias = as(aggExp, Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + var aggField = as(aggFunc.field(), FieldAttribute.class); + return aggField.dataType() == GEO_POINT; + })); + + var from = as(agg.child(), EsRelation.class); + } + + /** + * Expects + * Limit[500[INTEGER]] + * \_Aggregate[[emp_no%2{r}#6],[COUNT(salary{f}#12) AS c, emp_no%2{r}#6]] + * \_Eval[[emp_no{f}#7 % 2[INTEGER] AS emp_no%2]] + * \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] + */ + public void testNestedExpressionsInGroups() { + var plan = optimizedPlan(""" + from test + | stats c = count(salary) by emp_no % 2 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var groupings = agg.groupings(); + var aggs = agg.aggregates(); + var ref = as(groupings.get(0), ReferenceAttribute.class); + assertThat(aggs.get(1), is(ref)); + var eval = as(agg.child(), Eval.class); + assertThat(eval.fields(), hasSize(1)); + assertThat(eval.fields().get(0).toAttribute(), is(ref)); + assertThat(eval.fields().get(0).name(), is("emp_no % 2")); + } + + /** + * Expects + * Limit[500[INTEGER]] + * \_Aggregate[[emp_no{f}#6],[COUNT(__c_COUNT@1bd45f36{r}#16) AS c, emp_no{f}#6]] + * \_Eval[[salary{f}#11 + 1[INTEGER] AS __c_COUNT@1bd45f36]] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
+ */ + public void testNestedExpressionsInAggs() { + var plan = optimizedPlan(""" + from test + | stats c = count(salary + 1) by emp_no + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + var count = aliased(aggs.get(0), Count.class); + var ref = as(count.field(), ReferenceAttribute.class); + var eval = as(agg.child(), Eval.class); + var fields = eval.fields(); + assertThat(fields, hasSize(1)); + assertThat(fields.get(0).toAttribute(), is(ref)); + var add = aliased(fields.get(0), Add.class); + assertThat(Expressions.name(add.left()), is("salary")); + } + + /** + * Limit[500[INTEGER]] + * \_Aggregate[[emp_no%2{r}#7],[COUNT(__c_COUNT@fb7855b0{r}#18) AS c, emp_no%2{r}#7]] + * \_Eval[[emp_no{f}#8 % 2[INTEGER] AS emp_no%2, 100[INTEGER] / languages{f}#11 + salary{f}#13 + 1[INTEGER] AS __c_COUNT + * @fb7855b0]] + * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + */ + public void testNestedExpressionsInBothAggsAndGroups() { + var plan = optimizedPlan(""" + from test + | stats c = count(salary + 1 + 100 / languages) by emp_no % 2 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var groupings = agg.groupings(); + var aggs = agg.aggregates(); + var gRef = as(groupings.get(0), ReferenceAttribute.class); + assertThat(aggs.get(1), is(gRef)); + + var count = aliased(aggs.get(0), Count.class); + var aggRef = as(count.field(), ReferenceAttribute.class); + var eval = as(agg.child(), Eval.class); + var fields = eval.fields(); + assertThat(fields, hasSize(2)); + assertThat(fields.get(0).toAttribute(), is(gRef)); + assertThat(fields.get(1).toAttribute(), is(aggRef)); + + var mod = aliased(fields.get(0), Mod.class); + assertThat(Expressions.name(mod.left()), is("emp_no")); + var refs = Expressions.references(singletonList(fields.get(1))); + assertThat(Expressions.names(refs), containsInAnyOrder("languages", "salary")); + } + + public 
void testNestedMultiExpressionsInGroupingAndAggs() { + var plan = optimizedPlan(""" + from test + | stats count(salary + 1), max(salary + 23) by languages + 1, emp_no % 3 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(agg.output()), contains("count(salary + 1)", "max(salary + 23)", "languages + 1", "emp_no % 3")); + } + private LogicalPlan optimizedPlan(String query) { return plan(query); } @@ -2780,6 +2907,14 @@ private LogicalPlan plan(String query) { return optimized; } + private LogicalPlan planAirports(String query) { + var analyzed = analyzerAirports.analyze(parser.createStatement(query)); + // System.out.println(analyzed); + var optimized = logicalOptimizer.optimize(analyzed); + // System.out.println(optimized); + return optimized; + } + private void assertNullLiteral(Expression expression) { assertEquals(Literal.class, expression.getClass()); assertNull(expression.fold()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index c05e11d8d8a13..8b2324c22f8b9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; @@ -25,15 +26,21 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import 
org.elasticsearch.xpack.esql.analysis.EnrichResolution; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -52,6 +59,7 @@ import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; +import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.planner.Mapper; import org.elasticsearch.xpack.esql.planner.PhysicalVerificationException; @@ -61,6 +69,7 @@ import 
org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.FieldAttribute; @@ -69,9 +78,16 @@ import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; +import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.ql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; +import org.elasticsearch.xpack.ql.plan.logical.Aggregate; +import org.elasticsearch.xpack.ql.plan.logical.EsRelation; +import org.elasticsearch.xpack.ql.plan.logical.Filter; +import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; import org.junit.Before; @@ -84,6 +100,7 @@ import static java.util.Arrays.asList; import static org.elasticsearch.core.Tuple.tuple; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -94,6 +111,8 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; 
import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; +import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.PARTIAL; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.expression.Expressions.name; import static org.elasticsearch.xpack.ql.expression.Expressions.names; import static org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC; @@ -117,12 +136,14 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { private static final int KEYWORD_EST = EstimatesRowSize.estimateSize(DataTypes.KEYWORD); private EsqlParser parser; - private Analyzer analyzer; private LogicalPlanOptimizer logicalOptimizer; private PhysicalPlanOptimizer physicalPlanOptimizer; private Mapper mapper; private Map mapping; + private Analyzer analyzer; private int allFieldRowSize; + private static Map mappingAirports; + private static Analyzer analyzerAirports; private final EsqlConfiguration config; @@ -145,8 +166,30 @@ public PhysicalPlanOptimizerTests(String name, EsqlConfiguration config) { @Before public void init() { parser = new EsqlParser(); - + logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); + FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); + mapper = new Mapper(functionRegistry); + EnrichResolution enrichResolution = new EnrichResolution(); + enrichResolution.addResolvedPolicy( + "foo", + Enrich.Mode.ANY, + new ResolvedEnrichPolicy( + "fld", + EnrichPolicy.MATCH_TYPE, + List.of("a", "b"), + Map.of("", "idx"), + Map.ofEntries( + Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), + Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) + ) + ) + ); + // Most tests used data from the test index, so we load it here, and use it in the plan() function. 
mapping = loadMapping("mapping-basic.json"); + EsIndex test = new EsIndex("test", mapping); + IndexResolution getIndexResult = IndexResolution.valid(test); + analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); allFieldRowSize = mapping.values() .stream() .mapToInt( @@ -158,32 +201,15 @@ public void init() { .sum()) ) .sum(); - EsIndex test = new EsIndex("test", mapping); - IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); - physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); - FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); - mapper = new Mapper(functionRegistry); - var enrichResolution = new EnrichResolution( - Set.of( - new EnrichPolicyResolution( - "foo", - new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("idx"), "fld", List.of("a", "b")), - IndexResolution.valid( - new EsIndex( - "idx", - Map.ofEntries( - Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), - Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) - ) - ) - ) - ) - ), - Set.of("foo") - ); - analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); + // Some tests use data from the airports index, so we load it here, and use it in the plan_airports() function. 
+ mappingAirports = loadMapping("mapping-airports.json"); + EsIndex airports = new EsIndex("airports", mappingAirports); + IndexResolution getIndexResultAirports = IndexResolution.valid(airports); + analyzerAirports = new Analyzer( + new AnalyzerContext(config, functionRegistry, getIndexResultAirports, enrichResolution), + TEST_VERIFIER + ); } public void testSingleFieldExtractor() { @@ -1461,6 +1487,68 @@ public void testPushDownRLike() { assertEquals(".*foo.*", wildcard.value()); } + /** + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n + * ame{f}#7, long_noidx{f}#12, salary{f}#8]] + * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..] + * \_EsQueryExec[test], query[{"esql_single_value":{"field":"first_name","next": + * {"term":{"first_name":{"value":"foo","case_insensitive":true}}},"source":"first_name =~ \"foo\"@2:9"}}] + * [_doc{f}#23], limit[500], sort[] estimatedRowSize[324] + */ + public void testPushDownEqualsIgnoreCase() { + var plan = physicalPlan(""" + from test + | where first_name =~ "foo" + """); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + var exchange = asRemoteExchange(topLimit.child()); + var project = as(exchange.child(), ProjectExec.class); + var extractRest = as(project.child(), FieldExtractExec.class); + var source = source(extractRest.child()); + assertThat(source.estimatedRowSize(), equalTo(allFieldRowSize + Integer.BYTES)); + + QueryBuilder query = source.query(); + assertNotNull(query); + } + + /** + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, job{f}#13, job.raw{f}#14, languages{f}#9, last_ + * name{f}#10, long_noidx{f}#15, salary{f}#11, x{r}#4]] + * \_FieldExtractExec[_meta_field{f}#12, emp_no{f}#6, gender{f}#8, job{f}..] 
+ * \_LimitExec[500[INTEGER]] + * \_FilterExec[x{r}#4 =~ [66 6f 6f][KEYWORD]] + * \_EvalExec[[CONCAT(first_name{f}#7,[66 6f 6f][KEYWORD]) AS x]] + * \_FieldExtractExec[first_name{f}#7] + * \_EsQueryExec[test], query[][_doc{f}#27], limit[], sort[] estimatedRowSize[374] + */ + public void testNoPushDownEvalEqualsIgnoreCase() { + var plan = physicalPlan(""" + from test + | eval x = concat(first_name, "foo") + | where x =~ "foo" + """); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + var exchange = asRemoteExchange(topLimit.child()); + var project = as(exchange.child(), ProjectExec.class); + var extractRest = as(project.child(), FieldExtractExec.class); + var limit = as(extractRest.child(), LimitExec.class); + var filter = as(limit.child(), FilterExec.class); + var eval = as(filter.child(), EvalExec.class); + var extract = as(eval.child(), FieldExtractExec.class); + var source = source(extract.child()); + + QueryBuilder query = source.query(); + assertNull(query); + } + public void testPushDownNotRLike() { var plan = physicalPlan(""" from test @@ -1900,7 +1988,7 @@ public void testAvgSurrogateFunctionAfterRenameAndLimit() { var aggFinal = as(limit.child(), AggregateExec.class); assertThat(aggFinal.getMode(), equalTo(FINAL)); var aggPartial = as(aggFinal.child(), AggregateExec.class); - assertThat(aggPartial.getMode(), equalTo(AggregateExec.Mode.PARTIAL)); + assertThat(aggPartial.getMode(), equalTo(PARTIAL)); limit = as(aggPartial.child(), LimitExec.class); assertThat(limit.limit(), instanceOf(Literal.class)); assertThat(limit.limit().fold(), equalTo(10)); @@ -2050,6 +2138,577 @@ public void testPartialAggFoldingOutputForSyntheticAgg() { assertThat(Expressions.names(source.output()), contains("sum", "seen", "count", "seen")); } + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#9) AS centroid],FINAL,null] + * \_ExchangeExec[[xVal{r}#10, xDel{r}#11, 
yVal{r}#12, yDel{r}#13, count{r}#14],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#9) AS centroid]] + * \_EsRelation[airports][abbrev{f}#5, location{f}#9, name{f}#6, scalerank{f}..]]] + * + * After local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#9) AS centroid],FINAL,50] + * \_ExchangeExec[[xVal{r}#10, xDel{r}#11, yVal{r}#12, yDel{r}#13, count{r}#14],true] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#9) AS centroid],PARTIAL,50] + * \_FilterExec[ISNOTNULL(location{f}#9)] + * \_FieldExtractExec[location{f}#9][location{f}#9] + * \_EsQueryExec[airports], query[][_doc{f}#26], limit[], sort[] estimatedRowSize[54] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + */ + public void testSpatialTypesAndStatsUseDocValues() { + var plan = physicalPlanAirports(""" + from airports + | stats centroid = st_centroid(location) + """); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + // below the exchange (in data node) the aggregation is using 
doc-values + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var filter = as(agg.child(), FilterExec.class); + var extract = as(filter.child(), FieldExtractExec.class); + source(extract.child()); + assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { + MappedFieldType.FieldExtractPreference extractPreference = extract.extractPreference(attr); + return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; + })); + } + + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@b54a93a7{r}#10) AS centroid],FINAL,null] + * \_ExchangeExec[[xVal{r}#11, xDel{r}#12, yVal{r}#13, yDel{r}#14, count{r}#15],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@b54a93a7{r}#10) AS centroid]] + * \_Eval[[TOGEOPOINT(location{f}#9) AS __centroid_SPATIALCENTROID@b54a93a7]] + * \_EsRelation[airports][abbrev{f}#5, location{f}#9, name{f}#6, scalerank{f}..]]] + * + * After local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@ad2847b6{r}#10) AS centroid],FINAL,50] + * \_ExchangeExec[[xVal{r}#11, xDel{r}#12, yVal{r}#13, yDel{r}#14, count{r}#15],true] + * \_AggregateExec[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@ad2847b6{r}#10) AS centroid],PARTIAL,50] + * \_EvalExec[[TOGEOPOINT(location{f}#9) AS __centroid_SPATIALCENTROID@ad2847b6]] + * \_FieldExtractExec[location{f}#9][] + * \_EsQueryExec[airports], query[][_doc{f}#28], limit[], sort[] estimatedRowSize[104] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + */ + public void testSpatialTypesAndStatsUseDocValuesNested() { + var plan = physicalPlanAirports(""" + from airports + | stats centroid = st_centroid(to_geopoint(location)) + """); + 
+ var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var eval = as(fAgg.child(), Eval.class); + var toGeoPoint = as(eval.fields().get(0).child(), ToGeoPoint.class); + assertThat("Expected point field", toGeoPoint.field().dataType(), equalTo(GEO_POINT)); + as(eval.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + // TODO: Change this to expect to useDocValues for correctly nested reference attributes that relate to functions on fields + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + var evalExec = as(agg.child(), EvalExec.class); + var extract = as(evalExec.child(), FieldExtractExec.class); + source(extract.child()); + // TODO: update this test when we support nested fields in SpatialDocValuesExtraction + // assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> + // { + // MappedFieldType.FieldExtractPreference extractPreference = extract.extractPreference(attr); + // return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; + // })); + } + + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * 
\_AggregateExec[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@ec8dd77e{r}#7) AS centroid],FINAL,null] + * \_AggregateExec[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@ec8dd77e{r}#7) AS centroid],PARTIAL,null] + * \_EvalExec[[[1 1 0 0 0 0 0 30 e2 4c 7c 45 40 0 0 e0 92 b0 82 2d 40][GEO_POINT] AS __centroid_SPATIALCENTROID@ec8dd77e]] + * \_RowExec[[[50 4f 49 4e 54 28 34 32 2e 39 37 31 30 39 36 32 39 39 35 38 38 36 38 20 31 34 2e 37 35 35 32 35 33 34 30 30 + * 36 35 33 36 29][KEYWORD] AS wkt]] + * + * After local optimizations we expect no changes because field is extracted: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@7ff910a{r}#7) AS centroid],FINAL,50] + * \_AggregateExec[[],[SPATIALCENTROID(__centroid_SPATIALCENTROID@7ff910a{r}#7) AS centroid],PARTIAL,50] + * \_EvalExec[[[1 1 0 0 0 0 0 30 e2 4c 7c 45 40 0 0 e0 92 b0 82 2d 40][GEO_POINT] AS __centroid_SPATIALCENTROID@7ff910a]] + * \_RowExec[[[50 4f 49 4e 54 28 34 32 2e 39 37 31 30 39 36 32 39 39 35 38 38 36 38 20 31 34 2e 37 35 35 32 35 33 34 30 30 + * 36 35 33 36 29][KEYWORD] AS wkt]] + */ + public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { + var plan = physicalPlanAirports(""" + row wkt = "POINT(42.97109629958868 14.7552534006536)" + | stats centroid = st_centroid(to_geopoint(wkt)) + """); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("Aggregation is FINAL", agg.getMode(), equalTo(FINAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + agg = as(agg.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + var eval = as(agg.child(), EvalExec.class); + 
as(eval.child(), RowExec.class); + + // Now optimize the plan and assert the same plan again, since no FieldExtractExec is added + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + assertThat("Aggregation is FINAL", agg.getMode(), equalTo(FINAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + agg = as(agg.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + eval = as(agg.child(), EvalExec.class); + as(eval.child(), RowExec.class); + } + + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#11) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,null] + * \_ExchangeExec[[xVal{r}#12, xDel{r}#13, yVal{r}#14, yDel{r}#15, count{r}#16, count{r}#17, seen{r}#18],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#11) AS centroid, COUNT([2a][KEYWORD]) AS count]] + * \_EsRelation[airports][abbrev{f}#7, location{f}#11, name{f}#8, scalerank{f..]]] + * + * After local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#11) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,58] + * \_ExchangeExec[[xVal{r}#12, xDel{r}#13, yVal{r}#14, yDel{r}#15, count{r}#16, count{r}#17, seen{r}#18],true] + * \_AggregateExec[[],[COUNT([2a][KEYWORD]) AS count, SPATIALCENTROID(location{f}#11) AS centroid],PARTIAL,58] + * \_FieldExtractExec[location{f}#11][location{f}#11] + * \_EsQueryExec[airports], query[][_doc{f}#33], limit[], sort[] estimatedRowSize[54] + * + * Note the FieldExtractExec has 'location' set for stats: 
FieldExtractExec[location{f}#9][location{f}#9] + */ + public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { + var plan = physicalPlanAirports(""" + from airports + | stats centroid = st_centroid(location), count = COUNT() + """); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + source(extract.child()); + assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { + MappedFieldType.FieldExtractPreference extractPreference = extract.extractPreference(attr); + return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; + })); 
+ } + + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#14) AS airports, SPATIALCENTROID(city_location{f}#17) AS cities, COUNT([2a][KEY + * WORD]) AS count],FINAL,null] + * \_ExchangeExec[[xVal{r}#18, xDel{r}#19, yVal{r}#20, yDel{r}#21, count{r}#22, xVal{r}#23, xDel{r}#24, yVal{r}#25, yDel{r}#26, + * count{r}#27, count{r}#28, seen{r}#29],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#14) AS airports, SPATIALCENTROID(city_location{f}#17) AS cities, COUNT([2a][KEY + * WORD]) AS count]] + * \_EsRelation[airports][abbrev{f}#10, city{f}#16, city_location{f}#17, coun..]]] + * + * After local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#14) AS airports, SPATIALCENTROID(city_location{f}#17) AS cities, COUNT([2a][KEY + * WORD]) AS count],FINAL,108] + * \_ExchangeExec[[xVal{r}#18, xDel{r}#19, yVal{r}#20, yDel{r}#21, count{r}#22, xVal{r}#23, xDel{r}#24, yVal{r}#25, yDel{r}#26, + * count{r}#27, count{r}#28, seen{r}#29],true] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#14) AS airports, SPATIALCENTROID(city_location{f}#17) AS cities, COUNT([2a][KEY + * WORD]) AS count],PARTIAL,108] + * \_FieldExtractExec[location{f}#14, city_location{f}#17][location{f}#14, city_location{f}#17] + * \_EsQueryExec[airports], query[][_doc{f}#53], limit[], sort[] estimatedRowSize[104] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + */ + public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { + var plan = physicalPlanAirports(""" + FROM airports + | STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() + """); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + 
// Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "airports", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "cities", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "airports", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "cities", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "airports", SpatialCentroid.class, GEO_POINT, true); + assertAggregation(agg, "cities", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + source(extract.child()); + assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { + MappedFieldType.FieldExtractPreference extractPreference = extract.extractPreference(attr); + return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; + })); + } + + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,null] + * 
\_ExchangeExec[[xVal{r}#13, xDel{r}#14, yVal{r}#15, yDel{r}#16, count{r}#17, count{r}#18, seen{r}#19],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count]] + * \_Filter[scalerank{f}#10 == 9[INTEGER]] + * \_EsRelation[airports][abbrev{f}#8, location{f}#12, name{f}#9, scalerank{f..]]] + * + * After local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#11) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,58] + * \_ExchangeExec[[xVal{r}#12, xDel{r}#13, yVal{r}#14, yDel{r}#15, count{r}#16, count{r}#17, seen{r}#18],true] + * \_AggregateExec[[],[COUNT([2a][KEYWORD]) AS count, SPATIALCENTROID(location{f}#11) AS centroid],PARTIAL,58] + * \_FieldExtractExec[location{f}#11][location{f}#11] + * \_EsQueryExec[airports], query[{"esql_single_value":{"field":"scalerank","next":{"term":{"scalerank":{"value":9}}}, + * "source":"scalerank == 9@2:9"}}][_doc{f}#34], limit[], sort[] estimatedRowSize[54] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + */ + public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { + var plan = physicalPlanAirports(""" + FROM airports + | WHERE scalerank == 9 + | STATS centroid=ST_CENTROID(location), count=COUNT() + """); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + 
assertFilterCondition(filter, Equals.class, "scalerank", 9); + as(filter.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { + MappedFieldType.FieldExtractPreference extractPreference = extract.extractPreference(attr); + return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; + })); + var source = source(extract.child()); + var qb = as(source.query(), SingleValueQuery.Builder.class); + assertThat("Expected predicate to be passed to Lucene query", qb.source().text(), equalTo("scalerank == 9")); + } + + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[scalerank{f}#10],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count, scalerank{f}#10], + * FINAL,null] + * \_ExchangeExec[[scalerank{f}#10, xVal{r}#13, xDel{r}#14, yVal{r}#15, yDel{r}#16, count{r}#17, count{r}#18, seen{r}#19],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[scalerank{f}#10],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count, 
scalerank{f}#10]] + * \_EsRelation[airports][abbrev{f}#8, location{f}#12, name{f}#9, scalerank{f..]]] + * + * After local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[scalerank{f}#10],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count, scalerank{f}#10], + * FINAL,62] + * \_ExchangeExec[[scalerank{f}#10, xVal{r}#13, xDel{r}#14, yVal{r}#15, yDel{r}#16, count{r}#17, count{r}#18, seen{r}#19],true] + * \_AggregateExec[[scalerank{f}#10],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count, scalerank{f}#10], + * PARTIAL,62] + * \_FieldExtractExec[location{f}#12][location{f}#12] + * \_EsQueryExec[airports], query[][_doc{f}#34], limit[], sort[] estimatedRowSize[54] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + */ + public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { + var plan = physicalPlanAirports(""" + FROM airports + | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank + """); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("One grouping in aggregation", agg.groupings().size(), equalTo(1)); + var att = as(agg.groupings().get(0), Attribute.class); + assertThat(att.name(), equalTo("scalerank")); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + att = as(agg.groupings().get(0), Attribute.class); + 
assertThat(att.name(), equalTo("scalerank")); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + att = as(agg.groupings().get(0), Attribute.class); + assertThat(att.name(), equalTo("scalerank")); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { + MappedFieldType.FieldExtractPreference extractPreference = extract.extractPreference(attr); + return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; + })); + source(extract.child()); + } + + /** + * Before local optimizations: + * + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(centroid{r}#4) AS centroid, SUM(count{r}#6) AS count],FINAL,null] + * \_AggregateExec[[],[SPATIALCENTROID(centroid{r}#4) AS centroid, SUM(count{r}#6) AS count],PARTIAL,null] + * \_AggregateExec[[scalerank{f}#16],[SPATIALCENTROID(location{f}#18) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,null] + * \_ExchangeExec[[scalerank{f}#16, xVal{r}#19, xDel{r}#20, yVal{r}#21, yDel{r}#22, count{r}#23, count{r}#24, seen{r}#25],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[scalerank{f}#16],[SPATIALCENTROID(location{f}#18) AS centroid, COUNT([2a][KEYWORD]) AS count]] + * \_EsRelation[airports][abbrev{f}#14, location{f}#18, name{f}#15, scalerank..]]] + * + * After local optimizations: + * + * LimitExec[500[INTEGER]] + * 
\_AggregateExec[[],[SPATIALCENTROID(centroid{r}#4) AS centroid, SUM(count{r}#6) AS count],FINAL,58] + * \_AggregateExec[[],[SPATIALCENTROID(centroid{r}#4) AS centroid, SUM(count{r}#6) AS count],PARTIAL,58] + * \_AggregateExec[[scalerank{f}#16],[SPATIALCENTROID(location{f}#18) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,58] + * \_ExchangeExec[[scalerank{f}#16, xVal{r}#19, xDel{r}#20, yVal{r}#21, yDel{r}#22, count{r}#23, count{r}#24, seen{r}#25],true] + * \_AggregateExec[[scalerank{f}#16],[SPATIALCENTROID(location{f}#18) AS centroid, COUNT([2a][KEYWORD]) AS count],PARTIAL,58] + * \_FieldExtractExec[location{f}#18][location{f}#18] + * \_EsQueryExec[airports], query[][_doc{f}#42], limit[], sort[] estimatedRowSize[54] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + */ + public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregated() { + var plan = physicalPlanAirports(""" + FROM airports + | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank + | STATS centroid=ST_CENTROID(centroid), count=SUM(count) + """); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("Aggregation is FINAL", agg.getMode(), equalTo(FINAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "count", Sum.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + agg = as(agg.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "count", Sum.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + agg = as(agg.child(), AggregateExec.class); + assertThat("Aggregation is FINAL", agg.getMode(), equalTo(FINAL)); + assertThat("One grouping in aggregation", 
agg.groupings().size(), equalTo(1)); + var att = as(agg.groupings().get(0), Attribute.class); + assertThat(att.name(), equalTo("scalerank")); + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + assertThat("Aggregation is FINAL", agg.getMode(), equalTo(FINAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "count", Sum.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + agg = as(agg.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + assertAggregation(agg, "count", Sum.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + agg = as(agg.child(), AggregateExec.class); + assertThat("Aggregation is FINAL", agg.getMode(), equalTo(FINAL)); + assertThat("One grouping in aggregation", agg.groupings().size(), equalTo(1)); + att = as(agg.groupings().get(0), Attribute.class); + assertThat(att.name(), equalTo("scalerank")); + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("One grouping in aggregation", agg.groupings().size(), equalTo(1)); + att = as(agg.groupings().get(0), Attribute.class); + assertThat(att.name(), equalTo("scalerank")); + // below the 
exchange (in data node) the aggregation is using doc-values + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { + MappedFieldType.FieldExtractPreference extractPreference = extract.extractPreference(attr); + return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; + })); + source(extract.child()); + } + + @SuppressWarnings("SameParameterValue") + private static void assertFilterCondition( + Filter filter, + Class conditionClass, + String fieldName, + Object expected + ) { + var condition = as(filter.condition(), conditionClass); + var field = as(condition.left(), FieldAttribute.class); + assertThat("Expected filter field", field.name(), equalTo(fieldName)); + var value = as(condition.right(), Literal.class); + assertThat("Expected filter value", value.value(), equalTo(expected)); + } + + private static void assertAggregation( + PhysicalPlan plan, + String aliasName, + Class aggClass, + DataType fieldType, + boolean useDocValues + ) { + var aggFunc = assertAggregation(plan, aliasName, aggClass); + var aggField = as(aggFunc.field(), Attribute.class); + var spatialAgg = as(aggFunc, SpatialAggregateFunction.class); + assertThat("Expected spatial aggregation to use doc-values", spatialAgg.useDocValues(), equalTo(useDocValues)); + assertThat("", aggField.dataType(), equalTo(fieldType)); + } + + private static AggregateFunction assertAggregation(PhysicalPlan plan, String aliasName, Class aggClass) { + var agg = as(plan, AggregateExec.class); + var aggExp = agg.aggregates().stream().filter(a -> { + var alias = as(a, Alias.class); + return alias.name().equals(aliasName); + }).findFirst().orElseThrow(() -> new 
AssertionError("Expected aggregation " + aliasName + " not found")); + var alias = as(aggExp, Alias.class); + assertThat(alias.name(), is(aliasName)); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc, instanceOf(aggClass)); + return aggFunc; + } + private static EsQueryExec source(PhysicalPlan plan) { if (plan instanceof ExchangeExec exchange) { plan = exchange.child(); @@ -2115,6 +2774,15 @@ private PhysicalPlan physicalPlan(String query) { return physical; } + private PhysicalPlan physicalPlanAirports(String query) { + var logical = logicalOptimizer.optimize(analyzerAirports.analyze(parser.createStatement(query))); + // System.out.println("Logical\n" + logical); + var physical = mapper.map(logical); + // System.out.println(physical); + assertSerialization(physical); + return physical; + } + private List sorts(List orders) { return orders.stream().map(o -> new FieldSort((FieldAttribute) o.child(), o.direction(), o.nullsPosition())).toList(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index b20d166beb22e..4b908e815ffe3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -54,6 +54,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.function.Function; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; @@ -226,7 +227,7 @@ public void testEvalImplicitNames() { List.of( new Alias( EMPTY, - "fn(a+1)", + "fn(a + 1)", new UnresolvedFunction(EMPTY, "fn", DEFAULT, List.of(new Add(EMPTY, attribute("a"), integer(1)))) ) ) @@ -676,7 +677,16 @@ public void testLikeRLike() { public void testEnrich() { assertEquals( - new Enrich(EMPTY, 
PROCESSING_CMD_INPUT, new Literal(EMPTY, "countries", KEYWORD), new EmptyAttribute(EMPTY), null, List.of()), + new Enrich( + EMPTY, + PROCESSING_CMD_INPUT, + null, + new Literal(EMPTY, "countries", KEYWORD), + new EmptyAttribute(EMPTY), + null, + Map.of(), + List.of() + ), processingCommand("enrich countries") ); @@ -684,12 +694,29 @@ public void testEnrich() { new Enrich( EMPTY, PROCESSING_CMD_INPUT, + null, + new Literal(EMPTY, "index-policy", KEYWORD), + new UnresolvedAttribute(EMPTY, "field_underscore"), + null, + Map.of(), + List.of() + ), + processingCommand("enrich index-policy ON field_underscore") + ); + + Enrich.Mode mode = randomFrom(Enrich.Mode.values()); + assertEquals( + new Enrich( + EMPTY, + PROCESSING_CMD_INPUT, + mode, new Literal(EMPTY, "countries", KEYWORD), new UnresolvedAttribute(EMPTY, "country_code"), null, + Map.of(), List.of() ), - processingCommand("enrich countries ON country_code") + processingCommand("enrich [ccq.mode :" + mode.name() + "] countries ON country_code") ); expectError("from a | enrich countries on foo* ", "Using wildcards (*) in ENRICH WITH projections is not allowed [foo*]"); @@ -702,6 +729,10 @@ public void testEnrich() { "from a | enrich countries on foo with x* = bar ", "Using wildcards (*) in ENRICH WITH projections is not allowed [x*]" ); + expectError( + "from a | enrich [ccq.mode : typo] countries on foo", + "line 1:30: Unrecognized value [typo], ENRICH [ccq.mode] needs to be one of [ANY, COORDINATOR, REMOTE]" + ); } public void testMvExpand() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 27a45e71a69c1..c1ef69a0bf7ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java 
@@ -28,8 +28,6 @@ import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; @@ -122,6 +120,7 @@ private Matcher maxPageSizeMatcher(boolean estimatedRowSizeIsHuge, int private LocalExecutionPlanner planner() throws IOException { return new LocalExecutionPlanner( "test", + "", null, BigArrays.NON_RECYCLING_INSTANCE, TestBlockFactory.getNonBreakingInstance(), @@ -150,7 +149,7 @@ private EsqlConfiguration config() { private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOException { int numShards = randomIntBetween(1, 1000); - List searchContexts = new ArrayList<>(numShards); + List shardContexts = new ArrayList<>(numShards); var searcher = new ContextIndexSearcher( reader(), IndexSearcher.getDefaultSimilarity(), @@ -159,12 +158,16 @@ private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOExc true ); for (int i = 0; i < numShards; i++) { - searchContexts.add( - new TestSearchContext(createSearchExecutionContext(createMapperService(mapping(b -> {})), searcher), null, searcher) + shardContexts.add( + new EsPhysicalOperationProviders.DefaultShardContext( + i, + createSearchExecutionContext(createMapperService(mapping(b -> {})), searcher), + null + ) ); } - releasables.addAll(searchContexts); - return new EsPhysicalOperationProviders(searchContexts); + releasables.add(searcher); + return new EsPhysicalOperationProviders(shardContexts); } private IndexReader reader() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 8377530b9fbc2..f78b9bcfd5c98 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.planner; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.Describable; @@ -14,10 +15,12 @@ import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; @@ -25,6 +28,7 @@ import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.SourceOperator.SourceOperatorFactory; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -32,15 +36,21 @@ import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import 
org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; import java.util.List; import java.util.Random; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.IntStream; import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween; import static java.util.stream.Collectors.joining; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.NONE; public class TestPhysicalOperationProviders extends AbstractPhysicalOperationProviders { @@ -58,7 +68,7 @@ public PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fieldExt PhysicalOperation op = source; for (Attribute attr : fieldExtractExec.attributesToExtract()) { layout.append(attr); - op = op.with(new TestFieldExtractOperatorFactory(attr.name()), layout.build()); + op = op.with(new TestFieldExtractOperatorFactory(attr, fieldExtractExec.extractPreference(attr)), layout.build()); } return op; } @@ -148,14 +158,18 @@ private class TestFieldExtractOperator implements Operator { private Page lastPage; boolean finished; String columnName; + private final DataType dataType; + private final MappedFieldType.FieldExtractPreference extractPreference; - TestFieldExtractOperator(String columnName) { + TestFieldExtractOperator(String columnName, DataType dataType, MappedFieldType.FieldExtractPreference extractPreference) { this.columnName = columnName; + this.dataType = dataType; + this.extractPreference = extractPreference; } @Override public void addInput(Page page) { - Block block = extractBlockForColumn(page, columnName); + Block block = extractBlockForColumn(page, columnName, 
dataType, extractPreference); lastPage = page.appendBlock(block); } @@ -188,13 +202,12 @@ public void close() { } private class TestFieldExtractOperatorFactory implements Operator.OperatorFactory { - - final String columnName; final Operator op; + private String columnName; - TestFieldExtractOperatorFactory(String columnName) { - this.columnName = columnName; - this.op = new TestFieldExtractOperator(columnName); + TestFieldExtractOperatorFactory(Attribute attr, MappedFieldType.FieldExtractPreference extractPreference) { + this.op = new TestFieldExtractOperator(attr.name(), attr.dataType(), extractPreference); + this.columnName = attr.name(); } @Override @@ -204,7 +217,7 @@ public Operator get(DriverContext driverContext) { @Override public String describe() { - return "TestFieldExtractOperator"; + return "TestFieldExtractOperator(" + columnName + ")"; } } @@ -224,7 +237,7 @@ private class TestHashAggregationOperator extends HashAggregationOperator { @Override protected Page wrapPage(Page page) { - return page.appendBlock(extractBlockForColumn(page, columnName)); + return page.appendBlock(extractBlockForColumn(page, columnName, null, NONE)); } } @@ -280,7 +293,12 @@ public String describe() { } } - private Block extractBlockForColumn(Page page, String columnName) { + private Block extractBlockForColumn( + Page page, + String columnName, + DataType dataType, + MappedFieldType.FieldExtractPreference extractPreference + ) { var columnIndex = -1; // locate the block index corresponding to "columnName" for (int i = 0, size = columnNames.size(); i < size && columnIndex < 0; i++) { @@ -294,12 +312,84 @@ private Block extractBlockForColumn(Page page, String columnName) { DocBlock docBlock = page.getBlock(0); IntVector docIndices = docBlock.asVector().docs(); Block originalData = testData.getBlock(columnIndex); - Block.Builder builder = originalData.elementType() - .newBlockBuilder(docIndices.getPositionCount(), TestBlockFactory.getNonBreakingInstance()); - for (int c = 0; c 
< docIndices.getPositionCount(); c++) { - int doc = docIndices.getInt(c); - builder.copyFrom(originalData, doc, doc + 1); + var blockCopier = shouldMapToDocValues(dataType, extractPreference) + ? TestSpatialPointStatsBlockCopier.create(docIndices, dataType) + : new TestBlockCopier(docIndices); + return blockCopier.copyBlock(originalData); + } + + private boolean shouldMapToDocValues(DataType dataType, MappedFieldType.FieldExtractPreference extractPreference) { + return extractPreference == DOC_VALUES && EsqlDataTypes.isSpatialPoint(dataType); + } + + private static class TestBlockCopier { + + protected final IntVector docIndices; + + private TestBlockCopier(IntVector docIndices) { + this.docIndices = docIndices; + } + + protected Block copyBlock(Block originalData) { + try ( + Block.Builder builder = originalData.elementType() + .newBlockBuilder(docIndices.getPositionCount(), TestBlockFactory.getNonBreakingInstance()) + ) { + for (int c = 0; c < docIndices.getPositionCount(); c++) { + int doc = docIndices.getInt(c); + builder.copyFrom(originalData, doc, doc + 1); + } + return builder.build(); + } + } + } + + /** + * geo_point and cartesian_point are normally loaded as WKT from source, but for aggregations we can load them as doc-values + * which are encoded Long values. This class is used to convert the test loaded WKB into encoded longs for the aggregators. 
+ * TODO: We need a different solution to support geo_shape and cartesian_shape + */ + private abstract static class TestSpatialPointStatsBlockCopier extends TestBlockCopier { + + private TestSpatialPointStatsBlockCopier(IntVector docIndices) { + super(docIndices); + } + + protected abstract long encode(BytesRef wkb); + + @Override + protected Block copyBlock(Block originalData) { + BytesRef scratch = new BytesRef(100); + BytesRefBlock bytesRefBlock = (BytesRefBlock) originalData; + try (LongBlock.Builder builder = bytesRefBlock.blockFactory().newLongBlockBuilder(docIndices.getPositionCount())) { + for (int c = 0; c < docIndices.getPositionCount(); c++) { + int doc = docIndices.getInt(c); + int count = bytesRefBlock.getValueCount(doc); + int i = bytesRefBlock.getFirstValueIndex(doc); + if (count == 0) { + builder.appendNull(); + } else { + for (int v = 0; v < count; v++) { + builder.appendLong(encode(bytesRefBlock.getBytesRef(i, scratch))); + } + } + } + return builder.build(); + } + } + + private static TestSpatialPointStatsBlockCopier create(IntVector docIndices, DataType dataType) { + Function encoder = switch (dataType.esType()) { + case "geo_point" -> SpatialCoordinateTypes.GEO::wkbAsLong; + case "cartesian_point" -> SpatialCoordinateTypes.CARTESIAN::wkbAsLong; + default -> throw new IllegalArgumentException("Unsupported spatial data type: " + dataType); + }; + return new TestSpatialPointStatsBlockCopier(docIndices) { + @Override + protected long encode(BytesRef wkb) { + return encoder.apply(wkb); + } + }; } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 1947249086568..c9e9a2b75ebab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; +import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.analysis.VerificationException; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.execution.PlanExecutor; @@ -54,14 +55,24 @@ public void shutdownThreadPool() throws Exception { terminate(threadPool); } + @SuppressWarnings("unchecked") + EnrichPolicyResolver mockEnrichResolver() { + EnrichPolicyResolver enrichResolver = mock(EnrichPolicyResolver.class); + doAnswer(invocation -> { + Object[] arguments = invocation.getArguments(); + ActionListener listener = (ActionListener) arguments[arguments.length - 1]; + listener.onResponse(new EnrichResolution()); + return null; + }).when(enrichResolver).resolvePolicies(any(), any(), any()); + return enrichResolver; + } + public void testFailedMetric() { Client client = mock(Client.class); IndexResolver idxResolver = new IndexResolver(client, randomAlphaOfLength(10), EsqlDataTypeRegistry.INSTANCE, Set::of); var planExecutor = new PlanExecutor(idxResolver); String[] indices = new String[] { "test" }; - EnrichPolicyResolver enrichResolver = mock(EnrichPolicyResolver.class); - when(enrichResolver.allPolicyNames()).thenReturn(Set.of()); - + var enrichResolver = mockEnrichResolver(); // simulate a valid field_caps response so we can parse and correctly analyze de query FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class); when(fieldCapabilitiesResponse.getIndices()).thenReturn(indices); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index c4350c8ec74d7..43dec76c7de24 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -11,8 +11,6 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.dissect.DissectParser; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -34,7 +32,6 @@ import org.elasticsearch.xpack.ql.expression.UnresolvedNamedExpression; import org.elasticsearch.xpack.ql.expression.UnresolvedStar; import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.tree.Node; import org.elasticsearch.xpack.ql.tree.NodeSubclassTests; @@ -86,20 +83,6 @@ protected Object pluggableMakeArg(Class> toBuildClass, Class getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -363,8 +365,8 @@ public List getRestHandlers( ) { return List.of( new RestGetGlobalCheckpointsAction(), - new RestFleetSearchAction(restController.getSearchUsageHolder()), - new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder()), + new RestFleetSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry), + new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry), new 
RestGetSecretsAction(), new RestPostSecretsAction(), new RestDeleteSecretsAction() diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/DeleteSecretAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/DeleteSecretAction.java index 4245b2fca52a6..38103c82ee5e7 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/DeleteSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/DeleteSecretAction.java @@ -16,6 +16,6 @@ public class DeleteSecretAction extends ActionType { public static final DeleteSecretAction INSTANCE = new DeleteSecretAction(); private DeleteSecretAction() { - super(NAME, DeleteSecretResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java index 1e3794a4cefe4..a87297702bd30 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.core.TimeValue; @@ -55,7 +54,7 @@ public class GetGlobalCheckpointsAction extends ActionType { public static final GetSecretAction INSTANCE = new GetSecretAction(); private GetSecretAction() { - super(NAME, GetSecretResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/PostSecretAction.java 
b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/PostSecretAction.java index c0a0a0bff4fe6..bb30bce36cc48 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/PostSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/PostSecretAction.java @@ -16,6 +16,6 @@ public class PostSecretAction extends ActionType { public static final PostSecretAction INSTANCE = new PostSecretAction(); private PostSecretAction() { - super(NAME, PostSecretResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java index c177bea2e63ca..5e7ef365b6592 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; @@ -40,10 +41,16 @@ public class RestFleetMultiSearchAction extends BaseRestHandler { private final boolean allowExplicitIndex; private final SearchUsageHolder searchUsageHolder; + private final NamedWriteableRegistry namedWriteableRegistry; - public RestFleetMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder) { + public RestFleetMultiSearchAction( + Settings settings, + SearchUsageHolder searchUsageHolder, + NamedWriteableRegistry namedWriteableRegistry + ) { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); 
this.searchUsageHolder = searchUsageHolder; + this.namedWriteableRegistry = namedWriteableRegistry; } @Override @@ -65,7 +72,7 @@ public List routes() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { final MultiSearchRequest multiSearchRequest = RestMultiSearchAction.parseRequest( request, - client.getNamedWriteableRegistry(), + namedWriteableRegistry, allowExplicitIndex, searchUsageHolder, (key, value, searchRequest) -> { diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java index 73af65b2f31a6..ce606fdd17363 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -36,9 +37,11 @@ public class RestFleetSearchAction extends BaseRestHandler { private final SearchUsageHolder searchUsageHolder; + private final NamedWriteableRegistry namedWriteableRegistry; - public RestFleetSearchAction(SearchUsageHolder searchUsageHolder) { + public RestFleetSearchAction(SearchUsageHolder searchUsageHolder, NamedWriteableRegistry namedWriteableRegistry) { this.searchUsageHolder = searchUsageHolder; + this.namedWriteableRegistry = namedWriteableRegistry; } @Override @@ -68,14 +71,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli IntConsumer setSize = size -> searchRequest.source().size(size); 
request.withContentOrSourceParamParserOrNull(parser -> { - RestSearchAction.parseSearchRequest( - searchRequest, - request, - parser, - client.getNamedWriteableRegistry(), - setSize, - searchUsageHolder - ); + RestSearchAction.parseSearchRequest(searchRequest, request, parser, namedWriteableRegistry, setSize, searchUsageHolder); String[] stringWaitForCheckpoints = request.paramAsStringArray("wait_for_checkpoints", Strings.EMPTY_ARRAY); final long[] waitForCheckpoints = new long[stringWaitForCheckpoints.length]; for (int i = 0; i < stringWaitForCheckpoints.length; ++i) { diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java index cf727d93702bb..8931669c53ce8 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -64,6 +65,7 @@ public List> getSettings() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 4be64fd852dc0..3623d3671e83f 100644 --- 
a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -8,9 +8,9 @@ import org.apache.lucene.search.BooleanQuery; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; @@ -97,7 +97,7 @@ public void setUp() throws Exception { indicesAdmin().prepareRefresh("test").get(); // Ensure single segment with no deletes. Hopefully solves test instability in // issue https://github.com/elastic/x-pack-elasticsearch/issues/918 - ForceMergeResponse actionGet = indicesAdmin().prepareForceMerge("test").setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse actionGet = indicesAdmin().prepareForceMerge("test").setFlush(true).setMaxNumSegments(1).get(); indicesAdmin().prepareRefresh("test").get(); assertAllSuccessful(actionGet); for (IndexShardSegments seg : indicesAdmin().prepareSegments().get().getIndices().get("test")) { diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java index 4e9502c073d2a..c64b5ada48c3f 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; 
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -57,6 +58,7 @@ public Graph(Settings settings) { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle index a17fcde8691c9..46e705ce27244 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle @@ -5,7 +5,6 @@ dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) //TODO: update javaRestTests to not rely on any code that it is testing javaRestTestImplementation project(path: xpackModule('identity-provider')) - javaRestTestImplementation project(":client:rest-high-level") } testClusters.configureEach { diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java index b8565bc4ff898..c065e8d7e1d12 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdentityProviderAuthenticationIT.java @@ -30,7 +30,7 @@ import java.util.Map; import java.util.Set; -import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -185,8 +185,8 @@ private void authenticateWithSamlResponse(String samlResponse, @Nullable String equalTo("urn:oasis:names:tc:SAML:2.0:nameid-format:transient") ); assertThat(ObjectPath.eval("metadata.saml_roles", authMap), instanceOf(List.class)); - assertThat(ObjectPath.eval("metadata.saml_roles", authMap), hasSize(1)); - assertThat(ObjectPath.eval("metadata.saml_roles", authMap), contains("viewer")); + assertThat(ObjectPath.eval("metadata.saml_roles", authMap), hasSize(2)); + assertThat(ObjectPath.eval("metadata.saml_roles", authMap), containsInAnyOrder("viewer", "custom")); } } diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml index 0867c806f3126..174d92a033b42 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/resources/roles.yml @@ -8,4 +8,4 @@ idp_user: applications: - application: elastic-cloud resources: ["ec:123456:abcdefg"] - privileges: ["sso:viewer"] + privileges: ["sso:viewer", "sso:custom"] diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java index 7a279e367a0e8..a61e4c4e1c69e 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -128,6 +129,7 @@ public Collection createComponents(PluginServices services) { @Override public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/DeleteSamlServiceProviderAction.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/DeleteSamlServiceProviderAction.java index 080cd202c6f0d..38971be7d5062 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/DeleteSamlServiceProviderAction.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/DeleteSamlServiceProviderAction.java @@ -18,6 +18,6 @@ public class DeleteSamlServiceProviderAction extends ActionType { public static final SamlMetadataAction INSTANCE = new SamlMetadataAction(); private SamlMetadataAction() { - super(NAME, SamlMetadataResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestAction.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestAction.java index 0bca97eb3a1fb..baa337ccc72c3 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestAction.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestAction.java @@ -14,6 +14,6 @@ public class SamlValidateAuthnRequestAction extends ActionType listener ) { - 
actionsResolver.getActions(service.getApplicationName(), listener.delegateFailureAndWrap((delegate, actions) -> { + var groupedListener = new GroupedActionListener>(2, listener.delegateFailureAndWrap((delegate, actionSets) -> { + final Set actions = actionSets.stream().flatMap(Set::stream).collect(Collectors.toUnmodifiableSet()); if (actions == null || actions.isEmpty()) { logger.warn("No application-privilege actions defined for application [{}]", service.getApplicationName()); delegate.onResponse(null); @@ -141,5 +147,24 @@ private void buildResourcePrivilege( delegate.onResponse(builder.build()); } })); + + // We need to enumerate possible actions that might be authorized for the user. Here we combine actions that + // have been granted to the user via roles and other actions that are registered privileges for the given + // application. These actions will be checked by a has-privileges check above + final GetUserPrivilegesRequest request = new GetUserPrivilegesRequestBuilder(client).username(securityContext.getUser().principal()) + .request(); + client.execute( + GetUserPrivilegesAction.INSTANCE, + request, + groupedListener.map( + userPrivileges -> userPrivileges.getApplicationPrivileges() + .stream() + .filter(appPriv -> appPriv.getApplication().equals(service.getApplicationName())) + .map(appPriv -> appPriv.getPrivileges()) + .flatMap(Arrays::stream) + .collect(Collectors.toUnmodifiableSet()) + ) + ); + actionsResolver.getActions(service.getApplicationName(), groupedListener); } } diff --git a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java index 621c9f764de56..7b569e405732f 100644 --- a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java +++ 
b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java @@ -17,16 +17,20 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; import org.elasticsearch.xpack.core.security.user.User; import org.junit.Before; -import org.mockito.Mockito; +import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; @@ -50,11 +54,14 @@ public class UserPrivilegeResolverTests extends ESTestCase { private SecurityContext securityContext; private UserPrivilegeResolver resolver; + private String app; + @Before @SuppressWarnings("unchecked") public void setupTest() { client = mock(Client.class); securityContext = new SecurityContext(Settings.EMPTY, new ThreadContext(Settings.EMPTY)); + app = randomAlphaOfLengthBetween(3, 8); final ApplicationActionsResolver actionsResolver = mock(ApplicationActionsResolver.class); doAnswer(inv -> { final Object[] args = inv.getArguments(); @@ -63,12 +70,41 @@ public void setupTest() { listener.onResponse(Set.of("role:cluster:view", "role:cluster:admin", "role:cluster:operator", 
"role:cluster:monitor")); return null; }).when(actionsResolver).getActions(anyString(), any(ActionListener.class)); + doAnswer(inv -> { + final Object[] args = inv.getArguments(); + assertThat(args, arrayWithSize(3)); + ActionListener listener = (ActionListener) args[args.length - 1]; + RoleDescriptor.ApplicationResourcePrivileges appPriv1 = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(app) + .resources("resource1") + .privileges("role:extra1") + .build(); + RoleDescriptor.ApplicationResourcePrivileges appPriv2 = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(app) + .resources("resource1") + .privileges("role:extra2", "role:extra3") + .build(); + RoleDescriptor.ApplicationResourcePrivileges discardedAppPriv = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(randomAlphaOfLengthBetween(3, 8)) + .resources("resource1") + .privileges("role:discarded") + .build(); + GetUserPrivilegesResponse response = new GetUserPrivilegesResponse( + Set.of(), + Set.of(), + Set.of(), + Set.of(appPriv1, appPriv2, discardedAppPriv), + Set.of(), + Set.of() + ); + listener.onResponse(response); + return null; + }).when(client).execute(same(GetUserPrivilegesAction.INSTANCE), any(GetUserPrivilegesRequest.class), any(ActionListener.class)); resolver = new UserPrivilegeResolver(client, securityContext, actionsResolver); } public void testResolveZeroAccess() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); setupUser(username, () -> { setupHasPrivileges(username, app); final PlainActionFuture future = new PlainActionFuture<>(); @@ -93,7 +129,6 @@ public void testResolveZeroAccess() throws Exception { public void testResolveSsoWithNoRoleAccess() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); final String resource = "cluster:" + 
MessageDigests.toHexString(randomByteArrayOfLength(16)); final String viewerAction = "role:cluster:view"; final String adminAction = "role:cluster:admin"; @@ -118,7 +153,6 @@ public void testResolveSsoWithNoRoleAccess() throws Exception { public void testResolveSsoWithSingleRole() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16)); final String viewerAction = "role:cluster:view"; final String adminAction = "role:cluster:admin"; @@ -143,7 +177,6 @@ public void testResolveSsoWithSingleRole() throws Exception { public void testResolveSsoWithMultipleRoles() throws Exception { final String username = randomAlphaOfLengthBetween(4, 12); - final String app = randomAlphaOfLengthBetween(3, 8); final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16)); final String viewerAction = "role:cluster:view"; final String adminAction = "role:cluster:admin"; @@ -183,6 +216,35 @@ public void testResolveSsoWithMultipleRoles() throws Exception { }); } + public void testResolveSsoWithActionDefinedInUserPrivileges() throws Exception { + final String username = randomAlphaOfLengthBetween(4, 12); + final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16)); + final String actionInUserPrivs = "role:extra2"; + final String adminAction = "role:cluster:admin"; + + setupUser(username, () -> { + setupHasPrivileges(username, app, access(resource, actionInUserPrivs, true), access(resource, adminAction, false)); + + final PlainActionFuture future = new PlainActionFuture<>(); + final Function> roleMapping = Map.of( + actionInUserPrivs, + Set.of("extra2"), + adminAction, + Set.of("admin") + )::get; + resolver.resolve(service(app, resource, roleMapping), future); + final UserPrivilegeResolver.UserPrivileges privileges; + try { + privileges = future.get(); + } catch 
(Exception e) { + throw new RuntimeException(e); + } + assertThat(privileges.principal, equalTo(username)); + assertThat(privileges.hasAccess, equalTo(true)); + assertThat(privileges.roles, containsInAnyOrder("extra2")); + }); + } + private ServiceProviderPrivileges service(String appName, String resource, Function> roleMapping) { return new ServiceProviderPrivileges(appName, resource, roleMapping); } @@ -209,10 +271,24 @@ private HasPrivilegesResponse setupHasPrivileges( final Map> appPrivs = Map.of(appName, privileges); final HasPrivilegesResponse response = new HasPrivilegesResponse(username, isCompleteMatch, Map.of(), Set.of(), appPrivs); - Mockito.doAnswer(inv -> { + doAnswer(inv -> { final Object[] args = inv.getArguments(); assertThat(args.length, equalTo(3)); ActionListener listener = (ActionListener) args[args.length - 1]; + HasPrivilegesRequest request = (HasPrivilegesRequest) args[1]; + Set gotPriviliges = Arrays.stream(request.applicationPrivileges()) + .flatMap(appPriv -> Arrays.stream(appPriv.getPrivileges())) + .collect(Collectors.toUnmodifiableSet()); + Set expectedPrivileges = Set.of( + "role:cluster:view", + "role:cluster:admin", + "role:cluster:operator", + "role:cluster:monitor", + "role:extra1", + "role:extra2", + "role:extra3" + ); + assertEquals(expectedPrivileges, gotPriviliges); listener.onResponse(response); return null; }).when(client).execute(same(HasPrivilegesAction.INSTANCE), any(HasPrivilegesRequest.class), any(ActionListener.class)); diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle index 4dbc77497fc3a..8712af84ac245 100644 --- a/x-pack/plugin/ilm/qa/multi-node/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle @@ -6,7 +6,6 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(xpackModule('ilm')) - javaRestTestImplementation 
project(":client:rest-high-level") } File repoDir = file("$buildDir/testclusters/repo") diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java index cf8e06892d6d0..fe0b98ab658da 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -94,9 +95,16 @@ public static Map explainIndex(RestClient client, String indexNa public static Map> explain(RestClient client, String indexPattern, boolean onlyErrors, boolean onlyManaged) throws IOException { + RequestOptions consumeWarningsOptions = RequestOptions.DEFAULT.toBuilder() + .setWarningsHandler(warnings -> warnings.isEmpty() == false && List.of(""" + [indices.lifecycle.rollover.only_if_has_documents] setting was deprecated in Elasticsearch \ + and will be removed in a future release.""").equals(warnings) == false) + .build(); + Request explainRequest = new Request("GET", indexPattern + "/_ilm/explain"); explainRequest.addParameter("only_errors", Boolean.toString(onlyErrors)); explainRequest.addParameter("only_managed", Boolean.toString(onlyManaged)); + explainRequest.setOptions(consumeWarningsOptions); Response response = client.performRequest(explainRequest); Map responseMap; try (InputStream is = response.getEntity().getContent()) { diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java 
b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index d446873bd1f75..a6fa7cd3ffbc6 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -213,6 +213,7 @@ public void testRollupIndex() throws Exception { assertEquals(index, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME.getKey())); assertEquals(policy, settings.get(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey())); assertEquals(DownsampleTaskStatus.SUCCESS.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey())); + assertEquals(fixedInterval.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey())); }); assertBusy( () -> assertTrue("Alias [" + alias + "] does not point to index [" + rollupIndex + "]", aliasExists(rollupIndex, alias)) @@ -299,6 +300,7 @@ public void testRollupIndexInTheHotPhaseAfterRollover() throws Exception { assertEquals(originalIndex, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME.getKey())); assertEquals(policy, settings.get(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey())); assertEquals(DownsampleTaskStatus.SUCCESS.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey())); + assertEquals(fixedInterval.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey())); }); } @@ -345,6 +347,7 @@ public void testTsdbDataStreams() throws Exception { assertEquals(backingIndexName, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME.getKey())); assertEquals(policy, settings.get(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey())); assertEquals(DownsampleTaskStatus.SUCCESS.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey())); + assertEquals(fixedInterval.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey())); }); } @@ 
-478,6 +481,7 @@ public void testDownsampleTwice() throws Exception { assertEquals(downsampleIndexName, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME.getKey())); assertEquals(DownsampleTaskStatus.SUCCESS.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey())); assertEquals(policy, settings.get(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey())); + assertEquals("1h", settings.get(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey())); }, 60, TimeUnit.SECONDS); } catch (AssertionError ae) { if (indexExists(firstBackingIndex)) { @@ -559,6 +563,7 @@ public void testDownsampleTwiceSameInterval() throws Exception { assertEquals(firstBackingIndex, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_NAME.getKey())); assertEquals(firstBackingIndex, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME.getKey())); assertEquals(DownsampleTaskStatus.SUCCESS.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey())); + assertEquals("5m", settings.get(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey())); assertEquals(policy, settings.get(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey())); }, 60, TimeUnit.SECONDS); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java index b97caa6d96ed8..404d9a05396e9 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.routing.RoutingNode; @@ -431,7 +432,7 @@ public void testCreatePolicyWhenStopped() throws Exception { assertAcked(client().execute(ILMActions.STOP, new StopILMRequest()).get()); assertBusy(() -> { - OperationMode mode = client().execute(GetStatusAction.INSTANCE, new GetStatusAction.Request()).get().getMode(); + OperationMode mode = client().execute(GetStatusAction.INSTANCE, new AcknowledgedRequest.Plain()).get().getMode(); logger.info("--> waiting for STOPPED, currently: {}", mode); assertThat(mode, equalTo(OperationMode.STOPPED)); }); @@ -455,7 +456,7 @@ public void testCreatePolicyWhenStopped() throws Exception { is(both(greaterThanOrEqualTo(lowerBoundModifiedDate)).and(lessThanOrEqualTo(upperBoundModifiedDate))) ); // assert ILM is still stopped - GetStatusAction.Response statusResponse = client().execute(GetStatusAction.INSTANCE, new GetStatusAction.Request()).get(); + GetStatusAction.Response statusResponse = client().execute(GetStatusAction.INSTANCE, new AcknowledgedRequest.Plain()).get(); assertThat(statusResponse.getMode(), equalTo(OperationMode.STOPPED)); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index 1b52486f2b5ea..e013eb1520f29 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ 
-248,6 +249,7 @@ private static List xContentEntries() { @Override public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java index 5f2e1d49f90d5..186c7bd07961d 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ilm.action; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -31,7 +32,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - GetStatusAction.Request request = new GetStatusAction.Request(); + AcknowledgedRequest.Plain request = new AcknowledgedRequest.Plain(); request.timeout(restRequest.paramAsTime("timeout", request.timeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(GetStatusAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleAction.java index e032b300a824e..9e3399bd220ae 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleAction.java +++ 
b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleAction.java @@ -33,7 +33,9 @@ import org.elasticsearch.xpack.core.ilm.ExplainLifecycleRequest; import org.elasticsearch.xpack.core.ilm.ExplainLifecycleResponse; import org.elasticsearch.xpack.core.ilm.IndexLifecycleExplainResponse; +import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import org.elasticsearch.xpack.core.ilm.PhaseExecutionInfo; +import org.elasticsearch.xpack.core.ilm.RolloverAction; import org.elasticsearch.xpack.core.ilm.action.ExplainLifecycleAction; import org.elasticsearch.xpack.ilm.IndexLifecycleService; @@ -42,6 +44,7 @@ import java.util.TreeMap; import static org.elasticsearch.index.IndexSettings.LIFECYCLE_ORIGINATION_DATE; +import static org.elasticsearch.xpack.core.ilm.WaitForRolloverReadyStep.applyDefaultConditions; public class TransportExplainLifecycleAction extends TransportClusterInfoAction { @@ -80,6 +83,9 @@ protected void doMasterOperation( ClusterState state, ActionListener listener ) { + boolean rolloverOnlyIfHasDocuments = LifecycleSettings.LIFECYCLE_ROLLOVER_ONLY_IF_HAS_DOCUMENTS_SETTING.get( + state.metadata().settings() + ); Map indexResponses = new TreeMap<>(); for (String index : concreteIndices) { final IndexLifecycleExplainResponse indexResponse; @@ -90,7 +96,8 @@ protected void doMasterOperation( request.onlyErrors(), request.onlyManaged(), indexLifecycleService, - xContentRegistry + xContentRegistry, + rolloverOnlyIfHasDocuments ); } catch (IOException e) { listener.onFailure(new ElasticsearchParseException("failed to parse phase definition for index [" + index + "]", e)); @@ -111,7 +118,8 @@ static IndexLifecycleExplainResponse getIndexLifecycleExplainResponse( boolean onlyErrors, boolean onlyManaged, IndexLifecycleService indexLifecycleService, - NamedXContentRegistry xContentRegistry + NamedXContentRegistry xContentRegistry, + boolean rolloverOnlyIfHasDocuments ) throws IOException { IndexMetadata indexMetadata = 
metadata.index(indexName); Settings idxSettings = indexMetadata.getSettings(); @@ -136,6 +144,16 @@ static IndexLifecycleExplainResponse getIndexLifecycleExplainResponse( ) ) { phaseExecutionInfo = PhaseExecutionInfo.parse(parser, currentPhase); + + // Try to add default rollover conditions to the response. + var phase = phaseExecutionInfo.getPhase(); + if (phase != null) { + var rolloverAction = (RolloverAction) phase.getActions().get(RolloverAction.NAME); + if (rolloverAction != null) { + var conditions = applyDefaultConditions(rolloverAction.getConditions(), rolloverOnlyIfHasDocuments); + phase.getActions().put(RolloverAction.NAME, new RolloverAction(conditions)); + } + } } } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetStatusAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetStatusAction.java index 19eac02304835..b18cde4446c74 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetStatusAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetStatusAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -21,12 +22,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ilm.action.GetStatusAction; -import org.elasticsearch.xpack.core.ilm.action.GetStatusAction.Request; import org.elasticsearch.xpack.core.ilm.action.GetStatusAction.Response; import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentILMMode; -public class TransportGetStatusAction extends 
TransportMasterNodeAction { +public class TransportGetStatusAction extends TransportMasterNodeAction { @Inject public TransportGetStatusAction( @@ -42,7 +42,7 @@ public TransportGetStatusAction( clusterService, threadPool, actionFilters, - Request::new, + AcknowledgedRequest.Plain::new, indexNameExpressionResolver, Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE @@ -50,12 +50,12 @@ public TransportGetStatusAction( } @Override - protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, AcknowledgedRequest.Plain request, ClusterState state, ActionListener listener) { listener.onResponse(new Response(currentILMMode(state))); } @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { + protected ClusterBlockException checkBlock(AcknowledgedRequest.Plain request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index f47fc38206183..dd1e2bb9d8dd7 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -282,13 +282,12 @@ private void verifyCanStopWithStep(String stoppableStep) { ClusterChangedEvent event = new ClusterChangedEvent("_source", currentState, ClusterState.EMPTY_STATE); SetOnce changedOperationMode = new SetOnce<>(); doAnswer(invocationOnMock -> { + OperationModeUpdateTask task = (OperationModeUpdateTask) invocationOnMock.getArguments()[1]; + assertEquals(task.getILMOperationMode(), OperationMode.STOPPED); changedOperationMode.set(true); return null; }).when(clusterService) - .submitUnbatchedStateUpdateTask( - 
eq("ilm_operation_mode_update[stopped]"), - eq(OperationModeUpdateTask.ilmMode(OperationMode.STOPPED)) - ); + .submitUnbatchedStateUpdateTask(eq("ilm_operation_mode_update[stopped]"), any(OperationModeUpdateTask.class)); indexLifecycleService.applyClusterState(event); indexLifecycleService.triggerPolicies(currentState, true); assertTrue(changedOperationMode.get()); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java index 532a8f58b810e..fe39822d869d3 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -124,10 +125,10 @@ public void testModeSnapshotRestore() throws Exception { } private OperationMode ilmMode() throws Exception { - return client().execute(GetStatusAction.INSTANCE, new GetStatusAction.Request()).get().getMode(); + return client().execute(GetStatusAction.INSTANCE, new AcknowledgedRequest.Plain()).get().getMode(); } private OperationMode slmMode() throws Exception { - return client().execute(GetSLMStatusAction.INSTANCE, new GetSLMStatusAction.Request()).get().getOperationMode(); + return client().execute(GetSLMStatusAction.INSTANCE, new AcknowledgedRequest.Plain()).get().getOperationMode(); } } diff --git 
a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleActionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleActionTests.java index 246d2bcf21205..e705fe1624664 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleActionTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportExplainLifecycleActionTests.java @@ -85,7 +85,8 @@ public void testGetIndexLifecycleExplainResponse() throws IOException { true, true, indexLifecycleService, - REGISTRY + REGISTRY, + randomBoolean() ); assertThat(onlyErrorsResponse, notNullValue()); assertThat(onlyErrorsResponse.getIndex(), is(indexInErrorStep)); @@ -118,7 +119,8 @@ public void testGetIndexLifecycleExplainResponse() throws IOException { true, true, indexLifecycleService, - REGISTRY + REGISTRY, + randomBoolean() ); assertThat(onlyErrorsResponse, nullValue()); @@ -128,7 +130,8 @@ public void testGetIndexLifecycleExplainResponse() throws IOException { false, true, indexLifecycleService, - REGISTRY + REGISTRY, + randomBoolean() ); assertThat(allManagedResponse, notNullValue()); assertThat(allManagedResponse.getIndex(), is(indexInCheckRolloverStep)); @@ -154,7 +157,8 @@ public void testGetIndexLifecycleExplainResponse() throws IOException { true, true, indexLifecycleService, - REGISTRY + REGISTRY, + randomBoolean() ); assertThat(onlyErrorsResponse, notNullValue()); assertThat(onlyErrorsResponse.getPolicyName(), is("random-policy")); @@ -179,9 +183,43 @@ public void testGetIndexLifecycleExplainResponse() throws IOException { false, true, indexLifecycleService, - REGISTRY + REGISTRY, + randomBoolean() ); assertThat(onlyManaged, nullValue()); } + + { + // validate addition of default condition with `rolloverOnlyIfHasDocuments` true + IndexLifecycleService indexLifecycleService = mock(IndexLifecycleService.class); + 
when(indexLifecycleService.policyExists("my-policy")).thenReturn(true); + + LifecycleExecutionState.Builder checkRolloverReadyStepState = LifecycleExecutionState.builder() + .setPhase("hot") + .setAction("rollover") + .setStep(WaitForRolloverReadyStep.NAME) + .setPhaseDefinition(PHASE_DEFINITION); + String indexInCheckRolloverStep = "index_in_check_rollover"; + IndexMetadata indexMetadata = IndexMetadata.builder(indexInCheckRolloverStep) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, "my-policy")) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .putCustom(ILM_CUSTOM_METADATA_KEY, checkRolloverReadyStepState.build().asMap()) + .build(); + Metadata metadata = Metadata.builder().put(indexMetadata, true).build(); + + IndexLifecycleExplainResponse response = getIndexLifecycleExplainResponse( + indexInCheckRolloverStep, + metadata, + false, + true, + indexLifecycleService, + REGISTRY, + true + ); + var rolloverAction = ((RolloverAction) response.getPhaseExecutionInfo().getPhase().getActions().get(RolloverAction.NAME)); + assertThat(rolloverAction, notNullValue()); + assertThat(rolloverAction.getConditions().getMinDocs(), is(1L)); + } } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 61278fcae6d94..84b6bb94503c3 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
+ * + * this file has been contributed to by a Generative AI */ package org.elasticsearch.xpack.inference; diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 50647ca328b23..d6d0eb0bbbf21 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -69,8 +69,8 @@ protected Collection> getPlugins() { } public void testStoreModel() throws Exception { - String modelId = "test-store-model"; - Model model = buildElserModelConfig(modelId, TaskType.SPARSE_EMBEDDING); + String inferenceEntityId = "test-store-model"; + Model model = buildElserModelConfig(inferenceEntityId, TaskType.SPARSE_EMBEDDING); AtomicReference storeModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -81,8 +81,8 @@ public void testStoreModel() throws Exception { } public void testStoreModelWithUnknownFields() throws Exception { - String modelId = "test-store-model-unknown-field"; - Model model = buildModelWithUnknownField(modelId); + String inferenceEntityId = "test-store-model-unknown-field"; + Model model = buildModelWithUnknownField(inferenceEntityId); AtomicReference storeModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -96,12 +96,12 @@ public void testStoreModelWithUnknownFields() throws Exception { statusException.getRootCause().getMessage(), containsString("mapping set to strict, dynamic introduction of [unknown_field] within [_doc] is not allowed") ); - assertThat(exceptionHolder.get().getMessage(), containsString("Failed to store inference model [" + modelId + "]")); + 
assertThat(exceptionHolder.get().getMessage(), containsString("Failed to store inference model [" + inferenceEntityId + "]")); } public void testGetModel() throws Exception { - String modelId = "test-get-model"; - Model model = buildElserModelConfig(modelId, TaskType.SPARSE_EMBEDDING); + String inferenceEntityId = "test-get-model"; + Model model = buildElserModelConfig(inferenceEntityId, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -110,7 +110,7 @@ public void testGetModel() throws Exception { // now get the model AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getModelWithSecrets(modelId, listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), is(nullValue())); assertThat(modelHolder.get(), not(nullValue())); @@ -118,7 +118,7 @@ public void testGetModel() throws Exception { var elserService = new ElserMlNodeService(new InferenceServiceExtension.InferenceServiceFactoryContext(mock(Client.class))); ElserMlNodeModel roundTripModel = elserService.parsePersistedConfigWithSecrets( - modelHolder.get().modelId(), + modelHolder.get().inferenceEntityId(), modelHolder.get().taskType(), modelHolder.get().settings(), modelHolder.get().secrets() @@ -127,8 +127,8 @@ public void testGetModel() throws Exception { } public void testStoreModelFailsWhenModelExists() throws Exception { - String modelId = "test-put-trained-model-config-exists"; - Model model = buildElserModelConfig(modelId, TaskType.SPARSE_EMBEDDING); + String inferenceEntityId = "test-put-trained-model-config-exists"; + Model model = buildElserModelConfig(inferenceEntityId, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -196,10 
+196,10 @@ public void testGetModelsByTaskType() throws InterruptedException { assertThat(modelHolder.get(), hasSize(3)); var sparseIds = sparseAndTextEmbeddingModels.stream() .filter(m -> m.getConfigurations().getTaskType() == TaskType.SPARSE_EMBEDDING) - .map(Model::getModelId) + .map(Model::getInferenceEntityId) .collect(Collectors.toSet()); modelHolder.get().forEach(m -> { - assertTrue(sparseIds.contains(m.modelId())); + assertTrue(sparseIds.contains(m.inferenceEntityId())); assertThat(m.secrets().keySet(), empty()); }); @@ -207,10 +207,10 @@ public void testGetModelsByTaskType() throws InterruptedException { assertThat(modelHolder.get(), hasSize(2)); var denseIds = sparseAndTextEmbeddingModels.stream() .filter(m -> m.getConfigurations().getTaskType() == TaskType.TEXT_EMBEDDING) - .map(Model::getModelId) + .map(Model::getInferenceEntityId) .collect(Collectors.toSet()); modelHolder.get().forEach(m -> { - assertTrue(denseIds.contains(m.modelId())); + assertTrue(denseIds.contains(m.inferenceEntityId())); assertThat(m.secrets().keySet(), empty()); }); } @@ -238,9 +238,9 @@ public void testGetAllModels() throws InterruptedException { var getAllModels = modelHolder.get(); // sort in the same order as the returned models - createdModels.sort(Comparator.comparing(Model::getModelId)); + createdModels.sort(Comparator.comparing(Model::getInferenceEntityId)); for (int i = 0; i < modelCount; i++) { - assertEquals(createdModels.get(i).getModelId(), getAllModels.get(i).modelId()); + assertEquals(createdModels.get(i).getInferenceEntityId(), getAllModels.get(i).inferenceEntityId()); assertEquals(createdModels.get(i).getTaskType(), getAllModels.get(i).taskType()); assertEquals(createdModels.get(i).getConfigurations().getService(), getAllModels.get(i).service()); assertThat(getAllModels.get(i).secrets().keySet(), empty()); @@ -250,30 +250,30 @@ public void testGetAllModels() throws InterruptedException { @SuppressWarnings("unchecked") public void testGetModelWithSecrets() throws 
InterruptedException { var service = "foo"; - var modelId = "model-with-secrets"; + var inferenceEntityId = "model-with-secrets"; var secret = "abc"; AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - var modelWithSecrets = createModelWithSecrets(modelId, randomFrom(TaskType.values()), service, secret); + var modelWithSecrets = createModelWithSecrets(inferenceEntityId, randomFrom(TaskType.values()), service, secret); blockingCall(listener -> modelRegistry.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); assertNull(exceptionHolder.get()); AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getModelWithSecrets(modelId, listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get().secrets().keySet(), hasSize(1)); var secretSettings = (Map) modelHolder.get().secrets().get("secret_settings"); assertThat(secretSettings.get("secret"), equalTo(secret)); // get model without secrets - blockingCall(listener -> modelRegistry.getModel(modelId, listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get().secrets().keySet(), empty()); } - private Model buildElserModelConfig(String modelId, TaskType taskType) { - return ElserMlNodeServiceTests.randomModelConfig(modelId, taskType); + private Model buildElserModelConfig(String inferenceEntityId, TaskType taskType) { + return ElserMlNodeServiceTests.randomModelConfig(inferenceEntityId, taskType); } protected void blockingCall(Consumer> function, AtomicReference response, AtomicReference error) @@ -291,10 +291,10 @@ protected void blockingCall(Consumer> function, AtomicRefe latch.await(); } - private static 
Model buildModelWithUnknownField(String modelId) { + private static Model buildModelWithUnknownField(String inferenceEntityId) { return new Model( new ModelWithUnknownField( - modelId, + inferenceEntityId, TaskType.SPARSE_EMBEDDING, ElserMlNodeService.NAME, ElserMlNodeServiceSettingsTests.createRandom(), @@ -303,13 +303,13 @@ private static Model buildModelWithUnknownField(String modelId) { ); } - public static Model createModel(String modelId, TaskType taskType, String service) { - return new Model(new ModelConfigurations(modelId, taskType, service, new TestModelOfAnyKind.TestModelServiceSettings())); + public static Model createModel(String inferenceEntityId, TaskType taskType, String service) { + return new Model(new ModelConfigurations(inferenceEntityId, taskType, service, new TestModelOfAnyKind.TestModelServiceSettings())); } - public static Model createModelWithSecrets(String modelId, TaskType taskType, String service, String secret) { + public static Model createModelWithSecrets(String inferenceEntityId, TaskType taskType, String service, String secret) { return new Model( - new ModelConfigurations(modelId, taskType, service, new TestModelOfAnyKind.TestModelServiceSettings()), + new ModelConfigurations(inferenceEntityId, taskType, service, new TestModelOfAnyKind.TestModelServiceSettings()), new ModelSecrets(new TestModelOfAnyKind.TestSecretSettings(secret)) ); } @@ -391,15 +391,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - TestModelOfAnyKind(String modelId, TaskType taskType, String service) { - super(modelId, taskType, service, new TestModelServiceSettings(), new TestTaskSettings()); + TestModelOfAnyKind(String inferenceEntityId, TaskType taskType, String service) { + super(inferenceEntityId, taskType, service, new TestModelServiceSettings(), new TestTaskSettings()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); 
builder.field("unknown_field", "foo"); - builder.field(MODEL_ID, getModelId()); + builder.field(MODEL_ID, getInferenceEntityId()); builder.field(TaskType.NAME, getTaskType().toString()); builder.field(SERVICE, getService()); builder.field(SERVICE_SETTINGS, getServiceSettings()); @@ -412,20 +412,20 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws private static class ModelWithUnknownField extends ModelConfigurations { ModelWithUnknownField( - String modelId, + String inferenceEntityId, TaskType taskType, String service, ServiceSettings serviceSettings, TaskSettings taskSettings ) { - super(modelId, taskType, service, serviceSettings, taskSettings); + super(inferenceEntityId, taskType, service, serviceSettings, taskSettings); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("unknown_field", "foo"); - builder.field(MODEL_ID, getModelId()); + builder.field(MODEL_ID, getInferenceEntityId()); builder.field(TaskType.NAME, getTaskType().toString()); builder.field(SERVICE, getService()); builder.field(SERVICE_SETTINGS, getServiceSettings()); diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 3879a0a344e06..2d25a48117778 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -22,10 +22,5 @@ exports org.elasticsearch.xpack.inference.registry; exports org.elasticsearch.xpack.inference.rest; exports org.elasticsearch.xpack.inference.services; - exports org.elasticsearch.xpack.inference.external.http.sender; - exports org.elasticsearch.xpack.inference.external.http; - exports org.elasticsearch.xpack.inference.services.elser; - exports org.elasticsearch.xpack.inference.services.huggingface.elser; - exports org.elasticsearch.xpack.inference.services.openai; exports org.elasticsearch.xpack.inference; 
} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index c632c568fea16..c23e245b5696c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -16,7 +16,11 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; @@ -49,6 +53,9 @@ public static List getNamedWriteables() { namedWriteables.add( new NamedWriteableRegistry.Entry(InferenceServiceResults.class, TextEmbeddingResults.NAME, TextEmbeddingResults::new) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(InferenceServiceResults.class, TextEmbeddingByteResults.NAME, TextEmbeddingByteResults::new) + ); // Empty default task settings namedWriteables.add(new NamedWriteableRegistry.Entry(TaskSettings.class, EmptyTaskSettings.NAME, EmptyTaskSettings::new)); @@ -87,6 +94,21 @@ public static List 
getNamedWriteables() { new NamedWriteableRegistry.Entry(TaskSettings.class, OpenAiEmbeddingsTaskSettings.NAME, OpenAiEmbeddingsTaskSettings::new) ); + // Cohere + namedWriteables.add( + new NamedWriteableRegistry.Entry(ServiceSettings.class, CohereServiceSettings.NAME, CohereServiceSettings::new) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + CohereEmbeddingsServiceSettings.NAME, + CohereEmbeddingsServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(TaskSettings.class, CohereEmbeddingsTaskSettings.NAME, CohereEmbeddingsTaskSettings::new) + ); + return namedWriteables; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 33d71c65ed643..ea62ca8620bf5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -54,6 +54,7 @@ import org.elasticsearch.xpack.inference.rest.RestInferenceAction; import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceService; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; @@ -71,8 +72,6 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; private final Settings settings; - // We'll keep a reference to the http manager just in case the inference services don't get 
closed individually - private final SetOnce httpManager = new SetOnce<>(); private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); @@ -97,6 +96,7 @@ public InferencePlugin(Settings settings) { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -118,11 +118,9 @@ public Collection createComponents(PluginServices services) { var truncator = new Truncator(settings, services.clusterService()); serviceComponents.set(new ServiceComponents(services.threadPool(), throttlerManager, settings, truncator)); - httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager)); - var httpRequestSenderFactory = new HttpRequestSenderFactory( services.threadPool(), - httpManager.get(), + HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager), services.clusterService(), settings ); @@ -154,7 +152,8 @@ public List getInferenceServiceFactories() { ElserMlNodeService::new, context -> new HuggingFaceElserService(httpFactory, serviceComponents), context -> new HuggingFaceService(httpFactory, serviceComponents), - context -> new OpenAiService(httpFactory, serviceComponents) + context -> new OpenAiService(httpFactory, serviceComponents), + context -> new CohereService(httpFactory, serviceComponents) ); } @@ -233,6 +232,6 @@ public void close() { var serviceComponentsRef = serviceComponents.get(); var throttlerToClose = serviceComponentsRef != null ? 
serviceComponentsRef.throttlerManager() : null; - IOUtils.closeWhileHandlingException(httpManager.get(), throttlerToClose); + IOUtils.closeWhileHandlingException(inferenceServiceRegistry.get(), throttlerToClose); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java index ceb9fb92e3fab..6a5a3f5a137e1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java @@ -7,8 +7,12 @@ package org.elasticsearch.xpack.inference.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -18,6 +22,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -26,7 +32,10 @@ public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction { + private static final Logger logger = LogManager.getLogger(TransportDeleteInferenceModelAction.class); + private 
final ModelRegistry modelRegistry; + private final InferenceServiceRegistry serviceRegistry; @Inject public TransportDeleteInferenceModelAction( @@ -35,7 +44,8 @@ public TransportDeleteInferenceModelAction( ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - ModelRegistry modelRegistry + ModelRegistry modelRegistry, + InferenceServiceRegistry serviceRegistry ) { super( DeleteInferenceModelAction.NAME, @@ -48,6 +58,7 @@ public TransportDeleteInferenceModelAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.modelRegistry = modelRegistry; + this.serviceRegistry = serviceRegistry; } @Override @@ -57,11 +68,34 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { - modelRegistry.deleteModel(request.getModelId(), listener.delegateFailureAndWrap((l, r) -> l.onResponse(AcknowledgedResponse.TRUE))); + SubscribableListener.newForked(modelConfigListener -> { + modelRegistry.getModel(request.getInferenceEntityId(), modelConfigListener); + }).andThen((l1, unparsedModel) -> { + var service = serviceRegistry.getService(unparsedModel.service()); + if (service.isPresent()) { + service.get().stop(request.getInferenceEntityId(), l1); + } else { + l1.onFailure( + new ElasticsearchStatusException("No service found for model " + request.getInferenceEntityId(), RestStatus.NOT_FOUND) + ); + } + }).andThen((l2, didStop) -> { + if (didStop) { + modelRegistry.deleteModel(request.getInferenceEntityId(), l2); + } else { + l2.onFailure( + new ElasticsearchStatusException( + "Failed to stop model " + request.getInferenceEntityId(), + RestStatus.INTERNAL_SERVER_ERROR + ) + ); + } + }).addListener(listener.delegateFailure((l3, didDeleteModel) -> listener.onResponse(AcknowledgedResponse.of(didDeleteModel)))); } @Override protected ClusterBlockException checkBlock(DeleteInferenceModelAction.Request request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE); } + } diff 
--git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index a7f5fb6c6c9a0..2de1aecea118c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -64,19 +64,23 @@ protected void doExecute( GetInferenceModelAction.Request request, ActionListener listener ) { - boolean modelIdIsWildCard = Strings.isAllOrWildcard(request.getModelId()); + boolean inferenceEntityIdIsWildCard = Strings.isAllOrWildcard(request.getInferenceEntityId()); - if (request.getTaskType() == TaskType.ANY && modelIdIsWildCard) { + if (request.getTaskType() == TaskType.ANY && inferenceEntityIdIsWildCard) { getAllModels(listener); - } else if (modelIdIsWildCard) { + } else if (inferenceEntityIdIsWildCard) { getModelsByTaskType(request.getTaskType(), listener); } else { - getSingleModel(request.getModelId(), request.getTaskType(), listener); + getSingleModel(request.getInferenceEntityId(), request.getTaskType(), listener); } } - private void getSingleModel(String modelId, TaskType requestedTaskType, ActionListener listener) { - modelRegistry.getModel(modelId, listener.delegateFailureAndWrap((delegate, unparsedModel) -> { + private void getSingleModel( + String inferenceEntityId, + TaskType requestedTaskType, + ActionListener listener + ) { + modelRegistry.getModel(inferenceEntityId, listener.delegateFailureAndWrap((delegate, unparsedModel) -> { var service = serviceRegistry.getService(unparsedModel.service()); if (service.isEmpty()) { delegate.onFailure( @@ -84,7 +88,7 @@ private void getSingleModel(String modelId, TaskType requestedTaskType, ActionLi "Unknown service [{}] for model [{}]. 
", RestStatus.INTERNAL_SERVER_ERROR, unparsedModel.service(), - unparsedModel.modelId() + unparsedModel.inferenceEntityId() ) ); return; @@ -102,7 +106,8 @@ private void getSingleModel(String modelId, TaskType requestedTaskType, ActionLi return; } - var model = service.get().parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings()); + var model = service.get() + .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()); delegate.onResponse(new GetInferenceModelAction.Response(List.of(model.getConfigurations()))); })); } @@ -130,12 +135,12 @@ private GetInferenceModelAction.Response parseModels(List config, Set platformArchitectures, ActionListener listener ) { - var model = service.parseRequestConfig(modelId, taskType, config, platformArchitectures); + var model = service.parseRequestConfig(inferenceEntityId, taskType, config, platformArchitectures); service.checkModelConfig( model, @@ -186,11 +197,20 @@ private void parseAndStoreModel( ); } - private static void startModel(InferenceService service, Model model, ActionListener listener) { - service.start( - model, - listener.delegateFailureAndWrap((l, ok) -> l.onResponse(new PutInferenceModelAction.Response(model.getConfigurations()))) - ); + private static void startModel(InferenceService service, Model model, ActionListener finalListener) { + SubscribableListener.newForked((listener1) -> { service.putModel(model, listener1); }).< + PutInferenceModelAction.Response>andThen((listener2, modelDidPut) -> { + if (modelDidPut) { + service.start( + model, + listener2.delegateFailureAndWrap( + (l3, ok) -> l3.onResponse(new PutInferenceModelAction.Response(model.getConfigurations())) + ) + ); + } else { + logger.warn("Failed to put model [{}]", model.getInferenceEntityId()); + } + }).addListener(finalListener); } private Map requestToMap(PutInferenceModelAction.Request request) throws IOException { diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ActionUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ActionUtils.java index 856146fafcb45..e4d6e39fdf1f2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ActionUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ActionUtils.java @@ -11,9 +11,13 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.rest.RestStatus; +import java.net.URI; + public class ActionUtils { public static ActionListener wrapFailuresInElasticsearchException( @@ -35,5 +39,13 @@ public static ElasticsearchStatusException createInternalServerError(Throwable e return new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR, e); } + public static String constructFailedToSendRequestMessage(@Nullable URI uri, String message) { + if (uri != null) { + return Strings.format("Failed to send %s request to [%s]", message, uri); + } + + return Strings.format("Failed to send %s request", message); + } + private ActionUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java new file mode 100644 index 0000000000000..8c9d70f0a7323 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.cohere; + +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; + +import java.util.Map; +import java.util.Objects; + +/** + * Provides a way to construct an {@link ExecutableAction} using the visitor pattern based on the cohere model type. + */ +public class CohereActionCreator implements CohereActionVisitor { + private final Sender sender; + private final ServiceComponents serviceComponents; + + public CohereActionCreator(Sender sender, ServiceComponents serviceComponents) { + this.sender = Objects.requireNonNull(sender); + this.serviceComponents = Objects.requireNonNull(serviceComponents); + } + + @Override + public ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings) { + var overriddenModel = model.overrideWith(taskSettings); + + return new CohereEmbeddingsAction(sender, overriddenModel, serviceComponents); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java new file mode 100644 index 0000000000000..1500d48e3c201 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.cohere; + +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; + +import java.util.Map; + +public interface CohereActionVisitor { + ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java new file mode 100644 index 0000000000000..ae66496abbb1f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.cohere; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.xpack.inference.external.cohere.CohereResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; +import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.cohere.CohereEmbeddingsRequest; +import org.elasticsearch.xpack.inference.external.response.cohere.CohereEmbeddingsResponseEntity; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class CohereEmbeddingsAction implements ExecutableAction { + private static final Logger logger = LogManager.getLogger(CohereEmbeddingsAction.class); + private static final ResponseHandler HANDLER = createEmbeddingsHandler(); + + private final CohereAccount account; + private final CohereEmbeddingsModel model; + private final String failedToSendRequestErrorMessage; + private final 
RetryingHttpSender sender; + + public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model, ServiceComponents serviceComponents) { + this.model = Objects.requireNonNull(model); + this.account = new CohereAccount( + this.model.getServiceSettings().getCommonSettings().getUri(), + this.model.getSecretSettings().apiKey() + ); + this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( + this.model.getServiceSettings().getCommonSettings().getUri(), + "Cohere embeddings" + ); + this.sender = new RetryingHttpSender( + Objects.requireNonNull(sender), + serviceComponents.throttlerManager(), + logger, + new RetrySettings(serviceComponents.settings()), + serviceComponents.threadPool() + ); + } + + @Override + public void execute(List input, ActionListener listener) { + try { + CohereEmbeddingsRequest request = new CohereEmbeddingsRequest(account, input, model); + ActionListener wrappedListener = wrapFailuresInElasticsearchException( + failedToSendRequestErrorMessage, + listener + ); + + sender.send(request, HANDLER, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, failedToSendRequestErrorMessage)); + } + } + + private static ResponseHandler createEmbeddingsHandler() { + return new CohereResponseHandler("cohere text embedding", CohereEmbeddingsResponseEntity::fromResponse); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java index 0fa6edb043611..67c5fda5f83a0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java @@ -39,7 
+39,7 @@ public class HuggingFaceAction implements ExecutableAction { private final RetryingHttpSender sender; private final ResponseHandler responseHandler; private final Truncator truncator; - private final Integer tokenLimit; + private final HuggingFaceModel model; public HuggingFaceAction( Sender sender, @@ -49,11 +49,10 @@ public HuggingFaceAction( String requestType ) { Objects.requireNonNull(serviceComponents); - Objects.requireNonNull(model); Objects.requireNonNull(requestType); + this.model = Objects.requireNonNull(model); this.responseHandler = Objects.requireNonNull(responseHandler); - this.sender = new RetryingHttpSender( Objects.requireNonNull(sender), serviceComponents.throttlerManager(), @@ -62,17 +61,20 @@ public HuggingFaceAction( serviceComponents.threadPool() ); this.account = new HuggingFaceAccount(model.getUri(), model.getApiKey()); - this.errorMessage = format("Failed to send Hugging Face %s request to [%s]", requestType, model.getUri().toString()); + this.errorMessage = format( + "Failed to send Hugging Face %s request from inference entity id [%s]", + requestType, + model.getInferenceEntityId() + ); this.truncator = Objects.requireNonNull(serviceComponents.truncator()); - this.tokenLimit = model.getTokenLimit(); } @Override public void execute(List input, ActionListener listener) { try { - var truncatedInput = truncate(input, tokenLimit); + var truncatedInput = truncate(input, model.getTokenLimit()); - HuggingFaceInferenceRequest request = new HuggingFaceInferenceRequest(truncator, account, truncatedInput); + HuggingFaceInferenceRequest request = new HuggingFaceInferenceRequest(truncator, account, truncatedInput, model); ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); sender.send(request, responseHandler, wrappedListener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java index 20128f1168bb9..2e804dfeb6a4f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; @@ -20,12 +19,11 @@ import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; -import java.net.URI; import java.util.List; import java.util.Objects; -import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.common.Truncator.truncate; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; @@ -45,24 +43,16 @@ public OpenAiEmbeddingsAction(Sender sender, OpenAiEmbeddingsModel model, Servic this.model.getSecretSettings().apiKey() ); this.client = new OpenAiClient(Objects.requireNonNull(sender), Objects.requireNonNull(serviceComponents)); - this.errorMessage = getErrorMessage(this.model.getServiceSettings().uri()); + this.errorMessage = constructFailedToSendRequestMessage(this.model.getServiceSettings().uri(), "OpenAI embeddings"); this.truncator = 
Objects.requireNonNull(serviceComponents.truncator()); } - private static String getErrorMessage(@Nullable URI uri) { - if (uri != null) { - return format("Failed to send OpenAI embeddings request to [%s]", uri.toString()); - } - - return "Failed to send OpenAI embeddings request"; - } - @Override public void execute(List input, ActionListener listener) { try { var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); - OpenAiEmbeddingsRequest request = new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, model.getTaskSettings()); + OpenAiEmbeddingsRequest request = new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, model); ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); client.send(request, wrappedListener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereAccount.java new file mode 100644 index 0000000000000..9847d496d14ee --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereAccount.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.cohere; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; + +import java.net.URI; +import java.util.Objects; + +public record CohereAccount(@Nullable URI url, SecureString apiKey) { + + public CohereAccount { + Objects.requireNonNull(apiKey); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java new file mode 100644 index 0000000000000..c7e6493949400 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.cohere; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.response.cohere.CohereErrorResponseEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; + +/** + * Defines how to handle various errors returned from the Cohere integration. 
+ * + * NOTE: + * These headers are returned for trial API keys only (they also do not exist within 429 responses) + * + * + * x-endpoint-monthly-call-limit + * x-trial-endpoint-call-limit + * x-trial-endpoint-call-remaining + * + */ +public class CohereResponseHandler extends BaseResponseHandler { + static final String TEXTS_ARRAY_TOO_LARGE_MESSAGE_MATCHER = "invalid request: total number of texts must be at most"; + static final String TEXTS_ARRAY_ERROR_MESSAGE = "Received a texts array too large response"; + + public CohereResponseHandler(String requestType, ResponseParser parseFunction) { + super(requestType, parseFunction, CohereErrorResponseEntity::fromResponse); + } + + @Override + public void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) + throws RetryException { + checkForFailureStatusCode(request, result); + checkForEmptyBody(throttlerManager, logger, request, result); + } + + /** + * Validates the status code throws an RetryException if not in the range [200, 300). 
+ * + * @param request The http request + * @param result The http response and body + * @throws RetryException Throws if status code is {@code >= 300 or < 200 } + */ + void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException { + int statusCode = result.response().getStatusLine().getStatusCode(); + if (statusCode >= 200 && statusCode < 300) { + return; + } + + // handle error codes + if (statusCode >= 500) { + throw new RetryException(false, buildError(SERVER_ERROR, request, result)); + } else if (statusCode == 429) { + throw new RetryException(true, buildError(RATE_LIMIT, request, result)); + } else if (isTextsArrayTooLarge(result)) { + throw new RetryException(false, buildError(TEXTS_ARRAY_ERROR_MESSAGE, request, result)); + } else if (statusCode == 401) { + throw new RetryException(false, buildError(AUTHENTICATION, request, result)); + } else if (statusCode >= 300 && statusCode < 400) { + throw new RetryException(false, buildError(REDIRECTION, request, result)); + } else { + throw new RetryException(false, buildError(UNSUCCESSFUL, request, result)); + } + } + + private static boolean isTextsArrayTooLarge(HttpResult result) { + int statusCode = result.response().getStatusLine().getStatusCode(); + + if (statusCode == 400) { + var errorEntity = CohereErrorResponseEntity.fromResponse(result); + return errorEntity != null && errorEntity.getErrorMessage().contains(TEXTS_ARRAY_TOO_LARGE_MESSAGE_MATCHER); + } + + return false; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java index 73d1fa1c32568..99631c380b9fa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java @@ -8,7 +8,6 @@ package 
org.elasticsearch.xpack.inference.external.http; import org.apache.http.HttpResponse; -import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.concurrent.FutureCallback; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; @@ -19,6 +18,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; @@ -83,11 +83,11 @@ public void start() { } } - public void send(HttpUriRequest request, HttpClientContext context, ActionListener listener) throws IOException { + public void send(HttpRequest request, HttpClientContext context, ActionListener listener) throws IOException { // The caller must call start() first before attempting to send a request assert status.get() == Status.STARTED : "call start() before attempting to send a request"; - SocketAccess.doPrivileged(() -> client.execute(request, context, new FutureCallback<>() { + SocketAccess.doPrivileged(() -> client.execute(request.httpRequestBase(), context, new FutureCallback<>() { @Override public void completed(HttpResponse response) { respondUsingUtilityThread(response, request, listener); @@ -95,23 +95,30 @@ public void completed(HttpResponse response) { @Override public void failed(Exception ex) { - throttlerManager.warn(logger, format("Request [%s] failed", request.getRequestLine()), ex); + throttlerManager.warn(logger, format("Request from inference entity id [%s] failed", request.inferenceEntityId()), ex); failUsingUtilityThread(ex, listener); } @Override public void cancelled() { - failUsingUtilityThread(new CancellationException(format("Request [%s] was cancelled", request.getRequestLine())), listener); + failUsingUtilityThread( + new 
CancellationException(format("Request from inference entity id [%s] was cancelled", request.inferenceEntityId())), + listener + ); } })); } - private void respondUsingUtilityThread(HttpResponse response, HttpUriRequest request, ActionListener listener) { + private void respondUsingUtilityThread(HttpResponse response, HttpRequest request, ActionListener listener) { threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> { try { listener.onResponse(HttpResult.create(settings.getMaxResponseSize(), response)); } catch (Exception e) { - throttlerManager.warn(logger, format("Failed to create http result for [%s]", request.getRequestLine()), e); + throttlerManager.warn( + logger, + format("Failed to create http result from inference entity id [%s]", request.inferenceEntityId()), + e + ); listener.onFailure(e); } }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java index b6dbc6d6f2911..9f2ceddc92a2e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java @@ -7,20 +7,15 @@ package org.elasticsearch.xpack.inference.external.http; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import static org.elasticsearch.core.Strings.format; public class HttpUtils { - public static void checkForFailureStatusCode( - ThrottlerManager throttlerManager, - Logger logger, - HttpRequestBase request, - HttpResult result - ) { + public static void checkForFailureStatusCode(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) { if 
(result.response().getStatusLine().getStatusCode() >= 300) { String message = getStatusCodeErrorMessage(request, result); @@ -30,19 +25,19 @@ public static void checkForFailureStatusCode( } } - private static String getStatusCodeErrorMessage(HttpRequestBase request, HttpResult result) { + private static String getStatusCodeErrorMessage(Request request, HttpResult result) { int statusCode = result.response().getStatusLine().getStatusCode(); if (statusCode >= 400) { return format( - "Received a failure status code for request [%s] status [%s]", - request.getRequestLine(), + "Received a failure status code for request from inference entity id [%s] status [%s]", + request.getInferenceEntityId(), result.response().getStatusLine().getStatusCode() ); } else if (statusCode >= 300) { return format( - "Unhandled redirection for request [%s] status [%s]", - request.getRequestLine(), + "Unhandled redirection for request from inference entity id [%s] status [%s]", + request.getInferenceEntityId(), result.response().getStatusLine().getStatusCode() ); } else { @@ -50,9 +45,9 @@ private static String getStatusCodeErrorMessage(HttpRequestBase request, HttpRes } } - public static void checkForEmptyBody(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) { + public static void checkForEmptyBody(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) { if (result.isBodyEmpty()) { - String message = format("Response body was empty for request [%s]", request.getRequestLine()); + String message = format("Response body was empty for request from inference entity id [%s]", request.getInferenceEntityId()); throttlerManager.warn(logger, message); throw new IllegalStateException(message); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java index d95c690917d50..0349e858d9b22 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.retry; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.inference.InferenceServiceResults; @@ -36,7 +35,7 @@ public AlwaysRetryingResponseHandler( this.parseFunction = Objects.requireNonNull(parseFunction); } - public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + public void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) throws RetryException { try { checkForFailureStatusCode(throttlerManager, logger, request, result); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java index e40f4efad348c..b703cf2f14b75 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.retry; -import org.apache.http.client.methods.HttpRequestBase; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.inference.InferenceServiceResults; import 
org.elasticsearch.rest.RestStatus; @@ -52,22 +51,27 @@ public String getRequestType() { return requestType; } - protected Exception buildError(String message, HttpRequestBase request, HttpResult result) { + protected Exception buildError(String message, Request request, HttpResult result) { var errorEntityMsg = errorParseFunction.apply(result); var responseStatusCode = result.response().getStatusLine().getStatusCode(); if (errorEntityMsg == null) { return new ElasticsearchStatusException( - format("%s for request [%s] status [%s]", message, request.getRequestLine(), responseStatusCode), + format( + "%s for request from inference entity id [%s] status [%s]", + message, + request.getInferenceEntityId(), + responseStatusCode + ), toRestStatus(responseStatusCode) ); } return new ElasticsearchStatusException( format( - "%s for request [%s] status [%s]. Error message: [%s]", + "%s for request from inference entity id [%s] status [%s]. Error message: [%s]", message, - request.getRequestLine(), + request.getInferenceEntityId(), responseStatusCode, errorEntityMsg.getErrorMessage() ), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java index cb05af18e6a09..3d38c1941e52f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.retry; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; @@ -30,8 +29,7 @@ public interface ResponseHandler { * @param result the response 
from the server * @throws RetryException if the response is invalid */ - void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) - throws RetryException; + void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) throws RetryException; /** * A method for parsing the response from the server. diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandlerUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandlerUtils.java new file mode 100644 index 0000000000000..6269c81d4ceb7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandlerUtils.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.HttpResponse; + +public class ResponseHandlerUtils { + public static String getFirstHeaderOrUnknown(HttpResponse response, String name) { + var header = response.getFirstHeader(name); + if (header != null && header.getElements().length > 0) { + return header.getElements()[0].getName(); + } + return "unknown"; + } + + private ResponseHandlerUtils() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java index 123b921cc7d30..d8476c7c583d5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.retry; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; @@ -84,24 +83,22 @@ private class InternalRetrier extends RetryableAction { @Override public void tryAction(ActionListener listener) { - var httpRequest = request.createRequest(); - ActionListener responseListener = ActionListener.wrap(result -> { try { - responseHandler.validateResponse(throttlerManager, logger, httpRequest, result); + responseHandler.validateResponse(throttlerManager, logger, request, result); InferenceServiceResults inferenceResults = responseHandler.parseResult(request, result); listener.onResponse(inferenceResults); } catch (Exception e) { - logException(httpRequest, result, responseHandler.getRequestType(), e); + logException(request, result, responseHandler.getRequestType(), e); listener.onFailure(e); } }, 
e -> { - logException(httpRequest, responseHandler.getRequestType(), e); + logException(request, responseHandler.getRequestType(), e); listener.onFailure(transformIfRetryable(e)); }); - sender.send(httpRequest, responseListener); + sender.send(request.createHttpRequest(), responseListener); } @Override @@ -146,24 +143,24 @@ public void send(Request request, ResponseHandler responseHandler, ActionListene retrier.run(); } - private void logException(HttpRequestBase request, String requestType, Exception exception) { + private void logException(Request request, String requestType, Exception exception) { var causeException = ExceptionsHelper.unwrapCause(exception); throttlerManager.warn( logger, - format("Failed while sending request [%s] of type [%s]", request.getRequestLine(), requestType), + format("Failed while sending request from inference entity id [%s] of type [%s]", request.getInferenceEntityId(), requestType), causeException ); } - private void logException(HttpRequestBase request, HttpResult result, String requestType, Exception exception) { + private void logException(Request request, HttpResult result, String requestType, Exception exception) { var causeException = ExceptionsHelper.unwrapCause(exception); throttlerManager.warn( logger, format( - "Failed to process the response for request [%s] of type [%s] with status [%s] [%s]", - request.getRequestLine(), + "Failed to process the response for request from inference entity id [%s] of type [%s] with status [%s] [%s]", + request.getInferenceEntityId(), requestType, result.response().getStatusLine().getStatusCode(), result.response().getStatusLine().getReasonPhrase() diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java index 1e066410506bc..84aac7cde6bf5 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -19,6 +18,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; import java.util.ArrayList; import java.util.Collection; @@ -222,7 +222,7 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE * If null, then the request will wait forever * @param listener an {@link ActionListener} for the response or failure */ - public void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionListener listener) { + public void send(HttpRequest request, @Nullable TimeValue timeout, ActionListener listener) { RequestTask task = new RequestTask(request, httpClient, httpContext, timeout, threadPool, listener); if (isShutdown()) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java index acc7a0b3f6077..edceb8324fbc9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java @@ -7,7 +7,6 @@ package 
org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpUriRequest; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -20,6 +19,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; import java.io.IOException; import java.util.List; @@ -135,7 +135,7 @@ public void close() throws IOException { * connection from the connection pool * @param listener a listener to handle the response */ - public void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionListener listener) { + public void send(HttpRequest request, @Nullable TimeValue timeout, ActionListener listener) { assert started.get() : "call start() before sending a request"; waitForStartToComplete(); service.send(request, timeout, listener); @@ -156,7 +156,7 @@ private void waitForStartToComplete() { * @param request the http request to send * @param listener a listener to handle the response */ - public void send(HttpRequestBase request, ActionListener listener) { + public void send(HttpRequest request, ActionListener listener) { assert started.get() : "call start() before sending a request"; waitForStartToComplete(); service.send(request, maxRequestTimeout, listener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 5875126190e5d..2eefff791b709 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -20,6 +19,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; @@ -31,7 +31,7 @@ class RequestTask extends HttpTask { private static final Logger logger = LogManager.getLogger(RequestTask.class); private static final Scheduler.Cancellable NOOP_TIMEOUT_HANDLER = createDefaultHandler(); - private final HttpUriRequest request; + private final HttpRequest request; private final ActionListener listener; private final Scheduler.Cancellable timeoutHandler; private final AtomicBoolean notified = new AtomicBoolean(); @@ -39,7 +39,7 @@ class RequestTask extends HttpTask { private final Runnable command; RequestTask( - HttpUriRequest request, + HttpRequest request, HttpClient httpClient, HttpClientContext context, @Nullable TimeValue timeout, @@ -73,7 +73,13 @@ private Scheduler.Cancellable startTimer(ThreadPool threadPool, TimeValue timeou private void onTimeout() { assert timeout != null : "timeout must be defined to use a timeout handler"; - logger.debug(() -> format("Request [%s] timed out after [%s] while waiting to be executed", request.getRequestLine(), timeout)); + logger.debug( + () -> format( + "Request from inference entity id [%s] timed out after [%s] while waiting to be executed", + request.inferenceEntityId(), + timeout + ) + ); notifyOfResult( () -> listener.onFailure( new 
ElasticsearchTimeoutException(format("Request timed out waiting to be executed after [%s]", timeout)) @@ -84,10 +90,7 @@ private void onTimeout() { private void notifyOfResult(Runnable runnable) { if (notified.compareAndSet(false, true)) { runnable.run(); - return; } - - logger.debug(() -> format("Attempting to notify of result after already doing so for request [%s]", request.getRequestLine())); } @Override @@ -101,7 +104,7 @@ protected void doRun() { try { command.run(); } catch (Exception e) { - String message = format("Failed while executing request [%s]", request.getRequestLine()); + String message = format("Failed while executing request from inference entity id [%s]", request.inferenceEntityId()); logger.warn(message, e); onFailure(new ElasticsearchException(message, e)); } @@ -114,7 +117,7 @@ private void onSuccess(HttpResult result) { @Override public String toString() { - return request.getRequestLine().toString(); + return request.inferenceEntityId(); } private static Scheduler.Cancellable createDefaultHandler() { @@ -133,7 +136,7 @@ public boolean isCancelled() { private record Command( HttpClient httpClient, - HttpUriRequest requestToSend, + HttpRequest requestToSend, HttpClientContext context, ActionListener resultListener ) implements Runnable { @@ -143,9 +146,15 @@ public void run() { try { httpClient.send(requestToSend, context, resultListener); } catch (Exception e) { - logger.warn(format("Failed to send request [%s] via the http client", requestToSend.getRequestLine()), e); + logger.warn( + format("Failed to send request from inference entity id [%s] via the http client", requestToSend.inferenceEntityId()), + e + ); resultListener.onFailure( - new ElasticsearchException(format("Failed to send request [%s]", requestToSend.getRequestLine()), e) + new ElasticsearchException( + format("Failed to send request from inference entity id [%s]", requestToSend.inferenceEntityId()), + e + ) ); } } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java index abef521c77fc6..f1a0e112219fd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -7,18 +7,18 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.methods.HttpRequestBase; import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; import java.io.Closeable; public interface Sender extends Closeable { void start(); - void send(HttpRequestBase request, ActionListener listener); + void send(HttpRequest request, ActionListener listener); - void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionListener listener); + void send(HttpRequest request, @Nullable TimeValue timeout, ActionListener listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java index 64e5460bcce15..59804b37e465b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java @@ -7,13 +7,13 @@ package org.elasticsearch.xpack.inference.external.huggingface; -import org.apache.http.client.methods.HttpRequestBase; import 
org.apache.logging.log4j.Logger; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceErrorResponseEntity; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; @@ -26,7 +26,7 @@ public HuggingFaceResponseHandler(String requestType, ResponseParser parseFuncti } @Override - public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + public void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) throws RetryException { checkForFailureStatusCode(request, result); checkForEmptyBody(throttlerManager, logger, request, result); @@ -40,7 +40,7 @@ public void validateResponse(ThrottlerManager throttlerManager, Logger logger, H * @param result the http response and body * @throws RetryException thrown if status code is {@code >= 300 or < 200} */ - void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { + void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException { int statusCode = result.response().getStatusLine().getStatusCode(); if (statusCode >= 200 && statusCode < 300) { return; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index 207e3c2bbd035..10083d3fd4667 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.inference.external.openai; -import org.apache.http.HttpResponse; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.inference.external.http.HttpResult; @@ -16,10 +14,12 @@ import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.openai.OpenAiErrorResponseEntity; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; +import static org.elasticsearch.xpack.inference.external.http.retry.ResponseHandlerUtils.getFirstHeaderOrUnknown; public class OpenAiResponseHandler extends BaseResponseHandler { /** @@ -35,13 +35,14 @@ public class OpenAiResponseHandler extends BaseResponseHandler { static final String REMAINING_TOKENS = "x-ratelimit-remaining-tokens"; static final String CONTENT_TOO_LARGE_MESSAGE = "Please reduce your prompt; or completion length."; + static final String OPENAI_SERVER_BUSY = "Received a server busy error status code"; public OpenAiResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, OpenAiErrorResponseEntity::fromResponse); } @Override - public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + public void 
validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) throws RetryException { checkForFailureStatusCode(request, result); checkForEmptyBody(throttlerManager, logger, request, result); @@ -51,18 +52,22 @@ public void validateResponse(ThrottlerManager throttlerManager, Logger logger, H * Validates the status code throws an RetryException if not in the range [200, 300). * * The OpenAI API error codes are documented here. - * @param request The http request + * @param request The originating request * @param result The http response and body * @throws RetryException Throws if status code is {@code >= 300 or < 200 } */ - void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { + void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException { int statusCode = result.response().getStatusLine().getStatusCode(); if (statusCode >= 200 && statusCode < 300) { return; } // handle error codes - if (statusCode >= 500) { + if (statusCode == 500) { + throw new RetryException(true, buildError(SERVER_ERROR, request, result)); + } else if (statusCode == 503) { + throw new RetryException(true, buildError(OPENAI_SERVER_BUSY, request, result)); + } else if (statusCode > 500) { throw new RetryException(false, buildError(SERVER_ERROR, request, result)); } else if (statusCode == 429) { throw new RetryException(true, buildError(buildRateLimitErrorMessage(result), request, result)); @@ -110,12 +115,4 @@ static String buildRateLimitErrorMessage(HttpResult result) { return RATE_LIMIT + ". 
" + usageMessage; } - - private static String getFirstHeaderOrUnknown(HttpResponse response, String name) { - var header = response.getFirstHeader(name); - if (header != null && header.getElements().length > 0) { - return header.getElements()[0].getName(); - } - return "unknown"; - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/HttpRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/HttpRequest.java new file mode 100644 index 0000000000000..d81d16d6cbe10 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/HttpRequest.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request; + +import org.apache.http.client.methods.HttpRequestBase; + +import java.util.Objects; + +/** + * Provides a thin wrapper to give access the inference entity id that manages the settings for this request. 
+ */ +public record HttpRequest(HttpRequestBase httpRequestBase, String inferenceEntityId) { + public HttpRequest { + Objects.requireNonNull(httpRequestBase); + Objects.requireNonNull(inferenceEntityId); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java index a0b31f7b9dbe9..bfc32ecbc794d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.inference.external.request; -import org.apache.http.client.methods.HttpRequestBase; - import java.net.URI; public interface Request { - HttpRequestBase createRequest(); + HttpRequest createHttpRequest(); URI getURI(); @@ -27,4 +25,10 @@ public interface Request { * sent to the 3rd party server. */ boolean[] getTruncationInfo(); + + /** + * Provides access to an identifier to determine which inference configuration this request originated from. 
+ * @return the unique identifier for the inference entity configuration + */ + String getInferenceEntityId(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/RequestUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/RequestUtils.java index 355db7288dacc..6116b1cc234c6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/RequestUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/RequestUtils.java @@ -10,7 +10,14 @@ import org.apache.http.Header; import org.apache.http.HttpHeaders; import org.apache.http.message.BasicHeader; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.rest.RestStatus; + +import java.net.URI; +import java.net.URISyntaxException; public class RequestUtils { @@ -18,5 +25,14 @@ public static Header createAuthBearerHeader(SecureString apiKey) { return new BasicHeader(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey.toString()); } + public static URI buildUri(URI accountUri, String service, CheckedSupplier uriBuilder) { + try { + return accountUri == null ? 
uriBuilder.get() : accountUri; + } catch (URISyntaxException e) { + // using bad request here so that potentially sensitive URL information does not get logged + throw new ElasticsearchStatusException(Strings.format("Failed to construct %s URL", service), RestStatus.BAD_REQUEST, e); + } + } + private RequestUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java new file mode 100644 index 0000000000000..8cacbd0f16aaf --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; + +public class CohereEmbeddingsRequest implements Request { + + private final CohereAccount account; + private final List input; + private final URI uri; + private final CohereEmbeddingsTaskSettings taskSettings; + private final String model; + private final CohereEmbeddingType embeddingType; + private final String modelId; + + public CohereEmbeddingsRequest(CohereAccount account, List input, CohereEmbeddingsModel embeddingsModel) { + Objects.requireNonNull(embeddingsModel); + + this.account = Objects.requireNonNull(account); + this.input = Objects.requireNonNull(input); + uri = buildUri(this.account.url(), "Cohere", CohereEmbeddingsRequest::buildDefaultUri); + taskSettings = embeddingsModel.getTaskSettings(); + model = embeddingsModel.getServiceSettings().getCommonSettings().getModel(); + 
embeddingType = embeddingsModel.getServiceSettings().getEmbeddingType(); + modelId = embeddingsModel.getInferenceEntityId(); + } + + @Override + public HttpRequest createHttpRequest() { + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString(new CohereEmbeddingsRequestEntity(input, taskSettings, model, embeddingType)).getBytes(StandardCharsets.UTF_8) + ); + httpPost.setEntity(byteEntity); + + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + httpPost.setHeader(createAuthBearerHeader(account.apiKey())); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public String getInferenceEntityId() { + return modelId; + } + + @Override + public URI getURI() { + return uri; + } + + @Override + public Request truncate() { + return this; + } + + @Override + public boolean[] getTruncationInfo() { + return null; + } + + // default for testing + static URI buildDefaultUri() throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(CohereUtils.HOST) + .setPathSegments(CohereUtils.VERSION_1, CohereUtils.EMBEDDINGS_PATH) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java new file mode 100644 index 0000000000000..a0b5444ee45e4 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record CohereEmbeddingsRequestEntity( + List input, + CohereEmbeddingsTaskSettings taskSettings, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType +) implements ToXContentObject { + + private static final String SEARCH_DOCUMENT = "search_document"; + private static final String SEARCH_QUERY = "search_query"; + /** + * Maps the {@link InputType} to the expected value for cohere for the input_type field in the request using the enum's ordinal. 
+ * The order of these entries is important and needs to match the order in the enum + */ + private static final String[] INPUT_TYPE_MAPPING = { SEARCH_DOCUMENT, SEARCH_QUERY }; + static { + assert INPUT_TYPE_MAPPING.length == InputType.values().length : "input type mapping was incorrectly defined"; + } + + private static final String TEXTS_FIELD = "texts"; + + static final String INPUT_TYPE_FIELD = "input_type"; + static final String EMBEDDING_TYPES_FIELD = "embedding_types"; + + public CohereEmbeddingsRequestEntity { + Objects.requireNonNull(input); + Objects.requireNonNull(taskSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TEXTS_FIELD, input); + if (model != null) { + builder.field(CohereServiceSettings.MODEL, model); + } + + if (taskSettings.inputType() != null) { + builder.field(INPUT_TYPE_FIELD, covertToString(taskSettings.inputType())); + } + + if (embeddingType != null) { + builder.field(EMBEDDING_TYPES_FIELD, List.of(embeddingType)); + } + + if (taskSettings.truncation() != null) { + builder.field(CohereServiceFields.TRUNCATE, taskSettings.truncation()); + } + + builder.endObject(); + return builder; + } + + private static String covertToString(InputType inputType) { + return INPUT_TYPE_MAPPING[inputType.ordinal()]; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java new file mode 100644 index 0000000000000..f8ccd91d4e3d2 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +public class CohereUtils { + public static final String HOST = "api.cohere.ai"; + public static final String VERSION_1 = "v1"; + public static final String EMBEDDINGS_PATH = "embed"; + + private CohereUtils() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java index 9f86257ba4911..cd4fef6f0e827 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java @@ -9,13 +9,14 @@ import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.entity.ByteArrayEntity; import org.elasticsearch.common.Strings; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; import java.net.URI; import java.nio.charset.StandardCharsets; @@ -28,14 +29,21 @@ public class HuggingFaceInferenceRequest implements Request { private final Truncator truncator; private final HuggingFaceAccount account; private final Truncator.TruncationResult truncationResult; + private final HuggingFaceModel model; 
- public HuggingFaceInferenceRequest(Truncator truncator, HuggingFaceAccount account, Truncator.TruncationResult input) { + public HuggingFaceInferenceRequest( + Truncator truncator, + HuggingFaceAccount account, + Truncator.TruncationResult input, + HuggingFaceModel model + ) { this.truncator = Objects.requireNonNull(truncator); this.account = Objects.requireNonNull(account); this.truncationResult = Objects.requireNonNull(input); + this.model = Objects.requireNonNull(model); } - public HttpRequestBase createRequest() { + public HttpRequest createHttpRequest() { HttpPost httpPost = new HttpPost(account.url()); ByteArrayEntity byteEntity = new ByteArrayEntity( @@ -45,18 +53,23 @@ public HttpRequestBase createRequest() { httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaTypeWithoutParameters()); httpPost.setHeader(createAuthBearerHeader(account.apiKey())); - return httpPost; + return new HttpRequest(httpPost, getInferenceEntityId()); } public URI getURI() { return account.url(); } + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + @Override public Request truncate() { var truncateResult = truncator.truncate(truncationResult.input()); - return new HuggingFaceInferenceRequest(truncator, account, truncateResult); + return new HuggingFaceInferenceRequest(truncator, account, truncateResult, model); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java index 3a9fab44aa04e..dbb4c64f95637 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java @@ -9,23 +9,22 @@ import 
org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.utils.URIBuilder; import org.apache.http.entity.ByteArrayEntity; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; -import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings; +import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.Objects; +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.createOrgHeader; @@ -35,35 +34,28 @@ public class OpenAiEmbeddingsRequest implements Request { private final OpenAiAccount account; private final Truncator.TruncationResult truncationResult; private final URI uri; - private final OpenAiEmbeddingsTaskSettings taskSettings; + private final OpenAiEmbeddingsModel model; public OpenAiEmbeddingsRequest( Truncator truncator, OpenAiAccount account, Truncator.TruncationResult input, - OpenAiEmbeddingsTaskSettings taskSettings + OpenAiEmbeddingsModel model ) { this.truncator = Objects.requireNonNull(truncator); this.account = Objects.requireNonNull(account); this.truncationResult = Objects.requireNonNull(input); - this.uri = buildUri(this.account.url()); - 
this.taskSettings = Objects.requireNonNull(taskSettings); + this.uri = buildUri(this.account.url(), "OpenAI", OpenAiEmbeddingsRequest::buildDefaultUri); + this.model = Objects.requireNonNull(model); } - private static URI buildUri(URI accountUri) { - try { - return accountUri == null ? buildDefaultUri() : accountUri; - } catch (URISyntaxException e) { - throw new ElasticsearchStatusException("Failed to construct OpenAI URL", RestStatus.INTERNAL_SERVER_ERROR, e); - } - } - - public HttpRequestBase createRequest() { + public HttpRequest createHttpRequest() { HttpPost httpPost = new HttpPost(uri); ByteArrayEntity byteEntity = new ByteArrayEntity( - Strings.toString(new OpenAiEmbeddingsRequestEntity(truncationResult.input(), taskSettings.model(), taskSettings.user())) - .getBytes(StandardCharsets.UTF_8) + Strings.toString( + new OpenAiEmbeddingsRequestEntity(truncationResult.input(), model.getTaskSettings().model(), model.getTaskSettings().user()) + ).getBytes(StandardCharsets.UTF_8) ); httpPost.setEntity(byteEntity); @@ -75,7 +67,12 @@ public HttpRequestBase createRequest() { httpPost.setHeader(createOrgHeader(org)); } - return httpPost; + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); } @Override @@ -87,7 +84,7 @@ public URI getURI() { public Request truncate() { var truncatedInput = truncator.truncate(truncationResult.input()); - return new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, taskSettings); + return new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, model); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java new file mode 100644 index 0000000000000..bd808c225d7e3 --- /dev/null +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -0,0 +1,232 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.cohere; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; +import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType.toLowerCase; + +public class CohereEmbeddingsResponseEntity { + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = 
"Failed to find required field [%s] in Cohere embeddings response"; + + private static final Map> EMBEDDING_PARSERS = Map.of( + toLowerCase(CohereEmbeddingType.FLOAT), + CohereEmbeddingsResponseEntity::parseFloatEmbeddingsArray, + toLowerCase(CohereEmbeddingType.INT8), + CohereEmbeddingsResponseEntity::parseByteEmbeddingsArray + ); + private static final String VALID_EMBEDDING_TYPES_STRING = supportedEmbeddingTypes(); + + private static String supportedEmbeddingTypes() { + var validTypes = EMBEDDING_PARSERS.keySet().toArray(String[]::new); + Arrays.sort(validTypes); + return String.join(", ", validTypes); + } + + /** + * Parses the OpenAI json response. + * For a request like: + * + *
    +     * 
    +     * {
    +     *  "texts": ["hello this is my name", "I wish I was there!"]
    +     * }
    +     * 
    +     * 
    + * + * The response would look like: + * + *
    +     * 
    +     * {
    +     *  "id": "da4f9ea6-37e4-41ab-b5e1-9e2985609555",
    +     *  "texts": [
    +     *      "hello",
    +     *      "awesome"
    +     *  ],
    +     *  "embeddings": [
    +     *      [
    +     *          123
    +     *      ],
    +     *      [
    +     *          123
    +     *      ]
    +     *  ],
    +     *  "meta": {
    +     *      "api_version": {
    +     *          "version": "1"
    +     *      },
    +     *      "warnings": [
    +     *          "default model on embed will be deprecated in the future, please specify a model in the request."
    +     *      ],
    +     *      "billed_units": {
    +     *          "input_tokens": 3
    +     *      }
    +     *  },
    +     *  "response_type": "embeddings_floats"
    +     * }
    +     * 
    +     * 
    + * + * Or this: + * + *
    +     * 
    +     * {
    +     *  "id": "da4f9ea6-37e4-41ab-b5e1-9e2985609555",
    +     *  "texts": [
    +     *      "hello",
    +     *      "awesome"
    +     *  ],
    +     *  "embeddings": {
    +     *      "float": [
    +     *          [
    +     *              123
    +     *          ],
    +     *          [
    +     *              123
    +     *          ]
    +     *      ]
    +     *  },
    +     *  "meta": {
    +     *      "api_version": {
    +     *          "version": "1"
    +     *      },
    +     *      "warnings": [
    +     *          "default model on embed will be deprecated in the future, please specify a model in the request."
    +     *      ],
    +     *      "billed_units": {
    +     *          "input_tokens": 3
    +     *      }
    +     *  },
    +     *  "response_type": "embeddings_by_type"
    +     * }
    +     * 
    +     * 
    + */ + public static InferenceServiceResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_OBJECT) { + return parseEmbeddingsObject(jsonParser); + } else if (token == XContentParser.Token.START_ARRAY) { + // if the request did not specify the embedding types then it will default to floats + return parseFloatEmbeddingsArray(jsonParser); + } else { + throwUnknownToken(token, jsonParser); + } + + // This should never be reached. The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the Cohere response"); + } + } + + private static InferenceServiceResults parseEmbeddingsObject(XContentParser parser) throws IOException { + XContentParser.Token token; + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + var embeddingValueParser = EMBEDDING_PARSERS.get(parser.currentName()); + if (embeddingValueParser == null) { + continue; + } + + parser.nextToken(); + return embeddingValueParser.apply(parser); + } + } + + throw new IllegalStateException( + Strings.format( + "Failed to find a supported embedding type in the Cohere embeddings response. 
Supported types are [%s]", + VALID_EMBEDDING_TYPES_STRING + ) + ); + } + + private static InferenceServiceResults parseByteEmbeddingsArray(XContentParser parser) throws IOException { + var embeddingList = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); + + return new TextEmbeddingByteResults(embeddingList); + } + + private static TextEmbeddingByteResults.Embedding parseByteArrayEntry(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + List embeddingValues = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingInt8Entry); + + return new TextEmbeddingByteResults.Embedding(embeddingValues); + } + + private static Byte parseEmbeddingInt8Entry(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); + var parsedByte = parser.shortValue(); + checkByteBounds(parsedByte); + + return (byte) parsedByte; + } + + private static void checkByteBounds(short value) { + if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) { + throw new IllegalArgumentException("Value [" + value + "] is out of range for a byte"); + } + } + + private static InferenceServiceResults parseFloatEmbeddingsArray(XContentParser parser) throws IOException { + var embeddingList = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseFloatArrayEntry); + + return new TextEmbeddingResults(embeddingList); + } + + private static TextEmbeddingResults.Embedding parseFloatArrayEntry(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + List embeddingValues = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingFloatEntry); + + return new 
TextEmbeddingResults.Embedding(embeddingValues); + } + + private static Float parseEmbeddingFloatEntry(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); + return parser.floatValue(); + } + + private CohereEmbeddingsResponseEntity() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereErrorResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereErrorResponseEntity.java new file mode 100644 index 0000000000000..7d1731105e2f5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereErrorResponseEntity.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.cohere; + +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorMessage; + +public class CohereErrorResponseEntity implements ErrorMessage { + + private final String errorMessage; + + private CohereErrorResponseEntity(String errorMessage) { + this.errorMessage = errorMessage; + } + + @Override + public String getErrorMessage() { + return errorMessage; + } + + /** + * An example error response for invalid auth would look like + * + * { + * "message": "invalid request: total number of texts must be at most 96 - received 97" + * } + * + * + * + * @param response The error response + * @return An error entity if the response is JSON with the above structure + * or null if the response does not contain the message field + */ + public static CohereErrorResponseEntity fromResponse(HttpResult response) { + try ( + XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY, response.body()) + ) { + var responseMap = jsonParser.map(); + var message = (String) responseMap.get("message"); + if (message != null) { + return new CohereErrorResponseEntity(message); + } + } catch (Exception e) { + // swallow the error + } + + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 2de1cdc126bd6..0f3aa5b82b189 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -34,6 +34,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; @@ -58,11 +59,11 @@ public class ModelRegistry { public record ModelConfigMap(Map config, Map secrets) {} /** - * Semi parsed model where model id, task type and service + * Semi parsed model where inference entity id, task type and service * are known but the settings are not parsed. */ public record UnparsedModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, Map settings, @@ -73,12 +74,12 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) if (modelConfigMap.config() == null) { throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); } - String modelId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); + String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); TaskType taskType = TaskType.fromString(taskTypeStr); - return new UnparsedModel(modelId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); + return new UnparsedModel(inferenceEntityId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); } } @@ -94,22 +95,21 @@ public ModelRegistry(Client client) { /** * Get a model with its secret settings - * @param modelId Model to get + * @param inferenceEntityId Model to get * @param 
listener Model listener */ - public void getModelWithSecrets(String modelId, ActionListener listener) { + public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets if (searchResponse.getHits().getHits().length == 0) { - delegate.onFailure(new ResourceNotFoundException("Model not found [{}]", modelId)); + delegate.onFailure(new ResourceNotFoundException("Model not found [{}]", inferenceEntityId)); return; } - var hits = searchResponse.getHits().getHits(); - delegate.onResponse(UnparsedModel.unparsedModelFromMap(createModelConfigMap(hits, modelId))); + delegate.onResponse(UnparsedModel.unparsedModelFromMap(createModelConfigMap(searchResponse.getHits(), inferenceEntityId))); }); - QueryBuilder queryBuilder = documentIdQuery(modelId); + QueryBuilder queryBuilder = documentIdQuery(inferenceEntityId); SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN) .setQuery(queryBuilder) .setSize(2) @@ -121,24 +121,23 @@ public void getModelWithSecrets(String modelId, ActionListener li /** * Get a model. 
* Secret settings are not included - * @param modelId Model to get + * @param inferenceEntityId Model to get * @param listener Model listener */ - public void getModel(String modelId, ActionListener listener) { + public void getModel(String inferenceEntityId, ActionListener listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets if (searchResponse.getHits().getHits().length == 0) { - delegate.onFailure(new ResourceNotFoundException("Model not found [{}]", modelId)); + delegate.onFailure(new ResourceNotFoundException("Model not found [{}]", inferenceEntityId)); return; } - var hits = searchResponse.getHits().getHits(); - var modelConfigs = parseHitsAsModels(hits).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); assert modelConfigs.size() == 1; delegate.onResponse(modelConfigs.get(0)); }); - QueryBuilder queryBuilder = documentIdQuery(modelId); + QueryBuilder queryBuilder = documentIdQuery(inferenceEntityId); SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN) .setQuery(queryBuilder) .setSize(1) @@ -162,8 +161,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { return; } - var hits = searchResponse.getHits().getHits(); - var modelConfigs = parseHitsAsModels(hits).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); delegate.onResponse(modelConfigs); }); @@ -212,7 +209,7 @@ public void getAllModels(ActionListener> listener) { client.search(modelSearch, searchListener); } - private List parseHitsAsModels(SearchHit[] hits) { + private List parseHitsAsModels(SearchHits hits) { var modelConfigs = new ArrayList(); for (var hit : hits) { 
modelConfigs.add(new ModelConfigMap(hit.getSourceAsMap(), Map.of())); @@ -220,8 +217,8 @@ private List parseHitsAsModels(SearchHit[] hits) { return modelConfigs; } - private ModelConfigMap createModelConfigMap(SearchHit[] hits, String modelId) { - Map mappedHits = Arrays.stream(hits).collect(Collectors.toMap(hit -> { + private ModelConfigMap createModelConfigMap(SearchHits hits, String inferenceEntityId) { + Map mappedHits = Arrays.stream(hits.getHits()).collect(Collectors.toMap(hit -> { if (hit.getIndex().startsWith(InferenceIndex.INDEX_NAME)) { return InferenceIndex.INDEX_NAME; } @@ -230,11 +227,11 @@ private ModelConfigMap createModelConfigMap(SearchHit[] hits, String modelId) { return InferenceSecretsIndex.INDEX_NAME; } - logger.warn(format("Found invalid index for model [%s] at index [%s]", modelId, hit.getIndex())); + logger.warn(format("Found invalid index for model [%s] at index [%s]", inferenceEntityId, hit.getIndex())); throw new IllegalArgumentException( format( "Invalid result while loading model [%s] index: [%s]. Try deleting and reinitializing the service", - modelId, + inferenceEntityId, hit.getIndex() ) ); @@ -243,9 +240,14 @@ private ModelConfigMap createModelConfigMap(SearchHit[] hits, String modelId) { if (mappedHits.containsKey(InferenceIndex.INDEX_NAME) == false || mappedHits.containsKey(InferenceSecretsIndex.INDEX_NAME) == false || mappedHits.size() > 2) { - logger.warn(format("Failed to load model [%s], found model parts from index prefixes: [%s]", modelId, mappedHits.keySet())); + logger.warn( + format("Failed to load model [%s], found model parts from index prefixes: [%s]", inferenceEntityId, mappedHits.keySet()) + ); throw new IllegalStateException( - format("Failed to load model, model [%s] is in an invalid state. Try deleting and reinitializing the service", modelId) + format( + "Failed to load model, model [%s] is in an invalid state. 
Try deleting and reinitializing the service", + inferenceEntityId + ) ); } @@ -259,14 +261,14 @@ public void storeModel(Model model, ActionListener listener) { ActionListener bulkResponseActionListener = getStoreModelListener(model, listener); IndexRequest configRequest = createIndexRequest( - Model.documentId(model.getConfigurations().getModelId()), + Model.documentId(model.getConfigurations().getInferenceEntityId()), InferenceIndex.INDEX_NAME, model.getConfigurations(), false ); IndexRequest secretsRequest = createIndexRequest( - Model.documentId(model.getConfigurations().getModelId()), + Model.documentId(model.getConfigurations().getInferenceEntityId()), InferenceSecretsIndex.INDEX_NAME, model.getSecrets(), false @@ -281,16 +283,16 @@ public void storeModel(Model model, ActionListener listener) { private static ActionListener getStoreModelListener(Model model, ActionListener listener) { return ActionListener.wrap(bulkItemResponses -> { - var modelId = model.getConfigurations().getModelId(); + var inferenceEntityId = model.getConfigurations().getInferenceEntityId(); if (bulkItemResponses.getItems().length == 0) { - logger.warn(format("Storing model [%s] failed, no items were received from the bulk response", modelId)); + logger.warn(format("Storing model [%s] failed, no items were received from the bulk response", inferenceEntityId)); listener.onFailure( new ElasticsearchStatusException( format( "Failed to store inference model [%s], invalid bulk response received. 
Try reinitializing the service", - modelId + inferenceEntityId ), RestStatus.INTERNAL_SERVER_ERROR ) @@ -305,34 +307,34 @@ private static ActionListener getStoreModelListener(Model model, A return; } - logBulkFailures(model.getConfigurations().getModelId(), bulkItemResponses); + logBulkFailures(model.getConfigurations().getInferenceEntityId(), bulkItemResponses); if (ExceptionsHelper.unwrapCause(failure.getCause()) instanceof VersionConflictEngineException) { - listener.onFailure(new ResourceAlreadyExistsException("Inference model [{}] already exists", modelId)); + listener.onFailure(new ResourceAlreadyExistsException("Inference model [{}] already exists", inferenceEntityId)); return; } listener.onFailure( new ElasticsearchStatusException( - format("Failed to store inference model [%s]", modelId), + format("Failed to store inference model [%s]", inferenceEntityId), RestStatus.INTERNAL_SERVER_ERROR, failure.getCause() ) ); }, e -> { - String errorMessage = format("Failed to store inference model [%s]", model.getConfigurations().getModelId()); + String errorMessage = format("Failed to store inference model [%s]", model.getConfigurations().getInferenceEntityId()); logger.warn(errorMessage, e); listener.onFailure(new ElasticsearchStatusException(errorMessage, RestStatus.INTERNAL_SERVER_ERROR, e)); }); } - private static void logBulkFailures(String modelId, BulkResponse bulkResponse) { + private static void logBulkFailures(String inferenceEntityId, BulkResponse bulkResponse) { for (BulkItemResponse item : bulkResponse.getItems()) { if (item.isFailed()) { logger.warn( format( "Failed to store inference model [%s] index: [%s] bulk failure message [%s]", - modelId, + inferenceEntityId, item.getIndex(), item.getFailureMessage() ) @@ -351,10 +353,10 @@ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkRes return null; } - public void deleteModel(String modelId, ActionListener listener) { + public void deleteModel(String inferenceEntityId, 
ActionListener listener) { DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN); - request.setQuery(documentIdQuery(modelId)); + request.setQuery(documentIdQuery(inferenceEntityId)); request.setRefresh(true); client.execute(DeleteByQueryAction.INSTANCE, request, listener.delegateFailureAndWrap((l, r) -> l.onResponse(Boolean.TRUE))); @@ -372,7 +374,7 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T } } - private QueryBuilder documentIdQuery(String modelId) { - return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(modelId))); + private QueryBuilder documentIdQuery(String inferenceEntityId) { + return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java index 184b310a9f829..603b5ef41da73 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java @@ -31,9 +31,9 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String taskType = restRequest.param("task_type"); - String modelId = restRequest.param("model_id"); + String inferenceEntityId = restRequest.param("model_id"); - var request = new DeleteInferenceModelAction.Request(modelId, taskType); + var request = new DeleteInferenceModelAction.Request(inferenceEntityId, taskType); return channel -> client.execute(DeleteInferenceModelAction.INSTANCE, request, new 
RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index ce291bcf006ae..3dfa713cdecc7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -31,18 +31,18 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - String modelId = null; + String inferenceEntityId = null; TaskType taskType = null; if (restRequest.hasParam("task_type") == false && restRequest.hasParam("model_id") == false) { // _all models request - modelId = "_all"; + inferenceEntityId = "_all"; taskType = TaskType.ANY; } else { taskType = TaskType.fromStringOrStatusException(restRequest.param("task_type")); - modelId = restRequest.param("model_id"); + inferenceEntityId = restRequest.param("model_id"); } - var request = new GetInferenceModelAction.Request(modelId, taskType); + var request = new GetInferenceModelAction.Request(inferenceEntityId, taskType); return channel -> client.execute(GetInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java index 0286390a8a3ec..ba7e6b363f003 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java @@ -32,9 +32,9 @@ public List routes() { @Override protected RestChannelConsumer 
prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String taskType = restRequest.param("task_type"); - String modelId = restRequest.param("model_id"); + String inferenceEntityId = restRequest.param("model_id"); try (var parser = restRequest.contentParser()) { - var request = InferenceAction.Request.parseRequest(modelId, taskType, parser); + var request = InferenceAction.Request.parseRequest(inferenceEntityId, taskType, parser); return channel -> client.execute(InferenceAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java index 1199cf5688fcc..7c9bf400e998b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java @@ -32,9 +32,14 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String taskType = restRequest.param("task_type"); - String modelId = restRequest.param("model_id"); - - var request = new PutInferenceModelAction.Request(taskType, modelId, restRequest.requiredContent(), restRequest.getXContentType()); + String inferenceEntityId = restRequest.param("model_id"); + + var request = new PutInferenceModelAction.Request( + taskType, + inferenceEntityId, + restRequest.requiredContent(), + restRequest.getXContentType() + ); return channel -> client.execute(PutInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 1686cd32d4a6b..c218a0ff12c22 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -11,17 +11,22 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Strings; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.results.TextEmbedding; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import java.net.URI; import java.net.URISyntaxException; +import java.util.Arrays; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -105,9 +110,23 @@ public static String mustBeNonEmptyString(String settingName, String scope) { return Strings.format("[%s] Invalid value empty string. [%s] must be a non-empty string", scope, settingName); } + public static String invalidValue(String settingName, String scope, String invalidType, String... requiredTypes) { + return Strings.format( + "[%s] Invalid value [%s] received. 
[%s] must be one of [%s]", + scope, + invalidType, + settingName, + String.join(", ", requiredTypes) + ); + } + // TODO improve URI validation logic - public static URI convertToUri(String url, String settingName, String settingScope, ValidationException validationException) { + public static URI convertToUri(@Nullable String url, String settingName, String settingScope, ValidationException validationException) { try { + if (url == null) { + return null; + } + return createUri(url); } catch (IllegalArgumentException ignored) { validationException.addValidationError(ServiceUtils.invalidUrlErrorMsg(url, settingName, settingScope)); @@ -125,6 +144,14 @@ public static URI createUri(String url) throws IllegalArgumentException { } } + public static URI createOptionalUri(String url) { + if (url == null) { + return null; + } + + return createUri(url); + } + public static SecureString extractRequiredSecureString( Map map, String settingName, @@ -194,8 +221,35 @@ public static String extractOptionalString( return optionalField; } - public static String parsePersistedConfigErrorMsg(String modelId, String serviceName) { - return format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, serviceName); + public static T extractOptionalEnum( + Map map, + String settingName, + String scope, + CheckedFunction converter, + T[] validTypes, + ValidationException validationException + ) { + var enumString = extractOptionalString(map, settingName, scope, validationException); + if (enumString == null) { + return null; + } + + var validTypesAsStrings = Arrays.stream(validTypes).map(type -> type.toString().toLowerCase(Locale.ROOT)).toArray(String[]::new); + try { + return converter.apply(enumString); + } catch (IllegalArgumentException e) { + validationException.addValidationError(invalidValue(settingName, scope, enumString, validTypesAsStrings)); + } + + return null; + } + + public static String parsePersistedConfigErrorMsg(String 
inferenceEntityId, String serviceName) { + return format( + "Failed to parse stored model [%s] for [%s] service, please delete and add the service again", + inferenceEntityId, + serviceName + ); } public static ElasticsearchStatusException createInvalidModelException(Model model) { @@ -203,7 +257,7 @@ public static ElasticsearchStatusException createInvalidModelException(Model mod format( "The internal model was invalid, please delete the service [%s] with id [%s] and add it again.", model.getConfigurations().getService(), - model.getConfigurations().getModelId() + model.getConfigurations().getInferenceEntityId() ), RestStatus.INTERNAL_SERVER_ERROR ); @@ -219,16 +273,11 @@ public static void getEmbeddingSize(Model model, InferenceService service, Actio assert model.getTaskType() == TaskType.TEXT_EMBEDDING; service.infer(model, List.of(TEST_EMBEDDING_INPUT), Map.of(), listener.delegateFailureAndWrap((delegate, r) -> { - if (r instanceof TextEmbeddingResults embeddingResults) { - if (embeddingResults.embeddings().isEmpty()) { - delegate.onFailure( - new ElasticsearchStatusException( - "Could not determine embedding size, no embeddings were returned in test call", - RestStatus.BAD_REQUEST - ) - ); - } else { - delegate.onResponse(embeddingResults.embeddings().get(0).values().size()); + if (r instanceof TextEmbedding embeddingResults) { + try { + delegate.onResponse(embeddingResults.getFirstEmbeddingSize()); + } catch (Exception e) { + delegate.onFailure(new ElasticsearchStatusException("Could not determine embedding size", RestStatus.BAD_REQUEST, e)); } } else { delegate.onFailure( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java new file mode 100644 index 0000000000000..1b4843e441248 --- /dev/null +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere; + +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionVisitor; + +import java.util.Map; + +public abstract class CohereModel extends Model { + public CohereModel(ModelConfigurations configurations, ModelSecrets secrets) { + super(configurations, secrets); + } + + protected CohereModel(CohereModel model, TaskSettings taskSettings) { + super(model, taskSettings); + } + + protected CohereModel(CohereModel model, ServiceSettings serviceSettings) { + super(model, serviceSettings); + } + + public abstract ExecutableAction accept(CohereActionVisitor creator, Map taskSettings); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java new file mode 100644 index 0000000000000..8783f12852ec8 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -0,0 +1,179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettings; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; + +public class CohereService extends SenderService { + public static final String NAME = "cohere"; + + public 
CohereService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + public String name() { + return NAME; + } + + @Override + public CohereModel parseRequestConfig( + String modelId, + TaskType taskType, + Map config, + Set platformArchitectures + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + + CohereModel model = createModel( + modelId, + taskType, + serviceSettingsMap, + taskSettingsMap, + serviceSettingsMap, + TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME) + ); + + throwIfNotEmptyMap(config, NAME); + throwIfNotEmptyMap(serviceSettingsMap, NAME); + throwIfNotEmptyMap(taskSettingsMap, NAME); + + return model; + } + + private static CohereModel createModel( + String modelId, + TaskType taskType, + Map serviceSettings, + Map taskSettings, + @Nullable Map secretSettings, + String failureMessage + ) { + return switch (taskType) { + case TEXT_EMBEDDING -> new CohereEmbeddingsModel(modelId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); + }; + } + + @Override + public CohereModel parsePersistedConfigWithSecrets( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); + + return createModel( + modelId, + taskType, + serviceSettingsMap, + taskSettingsMap, + secretSettingsMap, + parsePersistedConfigErrorMsg(modelId, NAME) + ); + } + + @Override + public CohereModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { + Map serviceSettingsMap 
= removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + + return createModel(modelId, taskType, serviceSettingsMap, taskSettingsMap, null, parsePersistedConfigErrorMsg(modelId, NAME)); + } + + @Override + public void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ) { + if (model instanceof CohereModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } + + CohereModel cohereModel = (CohereModel) model; + var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); + + var action = cohereModel.accept(actionCreator, taskSettings); + action.execute(input, listener); + } + + /** + * For text embedding models get the embedding size and + * update the service settings. + * + * @param model The new model + * @param listener The listener + */ + @Override + public void checkModelConfig(Model model, ActionListener listener) { + if (model instanceof CohereEmbeddingsModel embeddingsModel) { + ServiceUtils.getEmbeddingSize( + model, + this, + listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size))) + ); + } else { + listener.onResponse(model); + } + } + + private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsModel model, int embeddingSize) { + CohereEmbeddingsServiceSettings serviceSettings = new CohereEmbeddingsServiceSettings( + new CohereServiceSettings( + model.getServiceSettings().getCommonSettings().getUri(), + SimilarityMeasure.DOT_PRODUCT, + embeddingSize, + model.getServiceSettings().getCommonSettings().getMaxInputTokens(), + model.getServiceSettings().getCommonSettings().getModel() + ), + model.getServiceSettings().getEmbeddingType() + ); + + return new CohereEmbeddingsModel(model, serviceSettings); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return 
TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceFields.java new file mode 100644 index 0000000000000..807520637f971 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceFields.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere; + +public class CohereServiceFields { + public static final String TRUNCATE = "truncate"; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java new file mode 100644 index 0000000000000..7964741d90343 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -0,0 +1,182 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createOptionalUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; + +public class CohereServiceSettings implements ServiceSettings { + public static final String NAME = "cohere_service_settings"; + public static final String MODEL = "model"; + + public static CohereServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + + SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, 
validationException); + Integer dims = removeAsType(map, DIMENSIONS, Integer.class); + Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); + URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + String model = extractOptionalString(map, MODEL, ModelConfigurations.SERVICE_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new CohereServiceSettings(uri, similarity, dims, maxInputTokens, model); + } + + private final URI uri; + private final SimilarityMeasure similarity; + private final Integer dimensions; + private final Integer maxInputTokens; + private final String model; + + public CohereServiceSettings( + @Nullable URI uri, + @Nullable SimilarityMeasure similarity, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens, + @Nullable String model + ) { + this.uri = uri; + this.similarity = similarity; + this.dimensions = dimensions; + this.maxInputTokens = maxInputTokens; + this.model = model; + } + + public CohereServiceSettings( + @Nullable String url, + @Nullable SimilarityMeasure similarity, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens, + @Nullable String model + ) { + this(createOptionalUri(url), similarity, dimensions, maxInputTokens, model); + } + + public CohereServiceSettings(StreamInput in) throws IOException { + uri = createOptionalUri(in.readOptionalString()); + similarity = in.readOptionalEnum(SimilarityMeasure.class); + dimensions = in.readOptionalVInt(); + maxInputTokens = in.readOptionalVInt(); + model = in.readOptionalString(); + } + + public URI getUri() { + return uri; + } + + public SimilarityMeasure getSimilarity() { + return similarity; + } + + public Integer getDimensions() { + return dimensions; + } + + public Integer getMaxInputTokens() { + return maxInputTokens; + } + + public String getModel() { + return model; + } + + @Override + public String 
getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + toXContentFragment(builder); + + builder.endObject(); + return builder; + } + + public XContentBuilder toXContentFragment(XContentBuilder builder) throws IOException { + if (uri != null) { + builder.field(URL, uri.toString()); + } + if (similarity != null) { + builder.field(SIMILARITY, similarity); + } + if (dimensions != null) { + builder.field(DIMENSIONS, dimensions); + } + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } + if (model != null) { + builder.field(MODEL, model); + } + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + var uriToWrite = uri != null ? uri.toString() : null; + out.writeOptionalString(uriToWrite); + out.writeOptionalEnum(similarity); + out.writeOptionalVInt(dimensions); + out.writeOptionalVInt(maxInputTokens); + out.writeOptionalString(model); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereServiceSettings that = (CohereServiceSettings) o; + return Objects.equals(uri, that.uri) + && Objects.equals(similarity, that.similarity) + && Objects.equals(dimensions, that.dimensions) + && Objects.equals(maxInputTokens, that.maxInputTokens) + && Objects.equals(model, that.model); + } + + @Override + public int hashCode() { + return Objects.hash(uri, similarity, dimensions, maxInputTokens, model); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereTruncation.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereTruncation.java new file mode 
100644 index 0000000000000..e7c9a0247bb1a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereTruncation.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere; + +import java.util.Locale; + +/** + * Defines the type of truncation for a cohere request. The specified value determines how the Cohere API will handle inputs + * longer than the maximum token length. + * + *

    + * See api docs for details. + *

    + */ +public enum CohereTruncation { + /** + * When the input exceeds the maximum input token length an error will be returned. + */ + NONE, + /** + * Discard the start of the input + */ + START, + /** + * Discard the end of the input + */ + END; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static CohereTruncation fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java new file mode 100644 index 0000000000000..82d57cfb92381 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import java.util.Locale; + +/** + * Defines the type of embedding that the cohere api should return for a request. + * + *

    + * See api docs for details. + *

    + */ +public enum CohereEmbeddingType { + /** + * Use this when you want to get back the default float embeddings. Valid for all models. + */ + FLOAT, + /** + * Use this when you want to get back signed int8 embeddings. Valid for only v3 models. + */ + INT8; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static String toLowerCase(CohereEmbeddingType type) { + return type.toString().toLowerCase(Locale.ROOT); + } + + public static CohereEmbeddingType fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java new file mode 100644 index 0000000000000..c92700e87cd96 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionVisitor; +import org.elasticsearch.xpack.inference.services.cohere.CohereModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.util.Map; + +public class CohereEmbeddingsModel extends CohereModel { + public CohereEmbeddingsModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets + ) { + this( + modelId, + taskType, + service, + CohereEmbeddingsServiceSettings.fromMap(serviceSettings), + CohereEmbeddingsTaskSettings.fromMap(taskSettings), + DefaultSecretSettings.fromMap(secrets) + ); + } + + // should only be used for testing + CohereEmbeddingsModel( + String modelId, + TaskType taskType, + String service, + CohereEmbeddingsServiceSettings serviceSettings, + CohereEmbeddingsTaskSettings taskSettings, + @Nullable DefaultSecretSettings secretSettings + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + } + + private CohereEmbeddingsModel(CohereEmbeddingsModel model, CohereEmbeddingsTaskSettings taskSettings) { + super(model, taskSettings); + } + + public CohereEmbeddingsModel(CohereEmbeddingsModel model, CohereEmbeddingsServiceSettings serviceSettings) { + super(model, serviceSettings); + } + + @Override + public CohereEmbeddingsServiceSettings getServiceSettings() { + return (CohereEmbeddingsServiceSettings) super.getServiceSettings(); + } + + @Override + public CohereEmbeddingsTaskSettings getTaskSettings() { + return 
(CohereEmbeddingsTaskSettings) super.getTaskSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + @Override + public ExecutableAction accept(CohereActionVisitor visitor, Map taskSettings) { + return visitor.create(this, taskSettings); + } + + public CohereEmbeddingsModel overrideWith(Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return this; + } + + var requestTaskSettings = CohereEmbeddingsTaskSettings.fromMap(taskSettings); + return new CohereEmbeddingsModel(this, getTaskSettings().overrideWith(requestTaskSettings)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java new file mode 100644 index 0000000000000..5327bcbcf22dd --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; + +public class CohereEmbeddingsServiceSettings implements ServiceSettings { + public static final String NAME = "cohere_embeddings_service_settings"; + + static final String EMBEDDING_TYPE = "embedding_type"; + + public static CohereEmbeddingsServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + var commonServiceSettings = CohereServiceSettings.fromMap(map); + CohereEmbeddingType embeddingTypes = extractOptionalEnum( + map, + EMBEDDING_TYPE, + ModelConfigurations.SERVICE_SETTINGS, + CohereEmbeddingType::fromString, + CohereEmbeddingType.values(), + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new CohereEmbeddingsServiceSettings(commonServiceSettings, embeddingTypes); + } + + private final CohereServiceSettings commonSettings; + private final CohereEmbeddingType embeddingType; + + public CohereEmbeddingsServiceSettings(CohereServiceSettings commonSettings, @Nullable CohereEmbeddingType embeddingType) { + this.commonSettings = commonSettings; + this.embeddingType = embeddingType; + } + + public CohereEmbeddingsServiceSettings(StreamInput in) 
throws IOException { + commonSettings = new CohereServiceSettings(in); + embeddingType = in.readOptionalEnum(CohereEmbeddingType.class); + } + + public CohereServiceSettings getCommonSettings() { + return commonSettings; + } + + public CohereEmbeddingType getEmbeddingType() { + return embeddingType; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + commonSettings.toXContentFragment(builder); + builder.field(EMBEDDING_TYPE, embeddingType); + + builder.endObject(); + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + commonSettings.writeTo(out); + out.writeOptionalEnum(embeddingType); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereEmbeddingsServiceSettings that = (CohereEmbeddingsServiceSettings) o; + return Objects.equals(commonSettings, that.commonSettings) && Objects.equals(embeddingType, that.embeddingType); + } + + @Override + public int hashCode() { + return Objects.hash(commonSettings, embeddingType); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java new file mode 100644 index 0000000000000..858efdb0d1ace --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; +import static org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields.TRUNCATE; + +/** + * Defines the task settings for the cohere text embeddings service. + * + *

    + * See api docs for details. + *

    + * + * @param inputType Specifies the type of input you're giving to the model + * @param truncation Specifies how the API will handle inputs longer than the maximum token length + */ +public record CohereEmbeddingsTaskSettings(@Nullable InputType inputType, @Nullable CohereTruncation truncation) implements TaskSettings { + + public static final String NAME = "cohere_embeddings_task_settings"; + public static final CohereEmbeddingsTaskSettings EMPTY_SETTINGS = new CohereEmbeddingsTaskSettings(null, null); + static final String INPUT_TYPE = "input_type"; + + public static CohereEmbeddingsTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + InputType inputType = extractOptionalEnum( + map, + INPUT_TYPE, + ModelConfigurations.TASK_SETTINGS, + InputType::fromString, + InputType.values(), + validationException + ); + CohereTruncation truncation = extractOptionalEnum( + map, + TRUNCATE, + ModelConfigurations.TASK_SETTINGS, + CohereTruncation::fromString, + CohereTruncation.values(), + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new CohereEmbeddingsTaskSettings(inputType, truncation); + } + + public CohereEmbeddingsTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalEnum(InputType.class), in.readOptionalEnum(CohereTruncation.class)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (inputType != null) { + builder.field(INPUT_TYPE, inputType); + } + + if (truncation != null) { + builder.field(TRUNCATE, truncation); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return 
TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalEnum(inputType); + out.writeOptionalEnum(truncation); + } + + public CohereEmbeddingsTaskSettings overrideWith(CohereEmbeddingsTaskSettings requestTaskSettings) { + var inputTypeToUse = requestTaskSettings.inputType() == null ? inputType : requestTaskSettings.inputType(); + var truncationToUse = requestTaskSettings.truncation() == null ? truncation : requestTaskSettings.truncation(); + + return new CohereEmbeddingsTaskSettings(inputTypeToUse, truncationToUse); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java index a317992bc7c40..e23ae76659700 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java @@ -14,13 +14,13 @@ public class ElserMlNodeModel extends Model { public ElserMlNodeModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, ElserMlNodeServiceSettings serviceSettings, ElserMlNodeTaskSettings taskSettings ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings)); + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings)); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index 01fe828d723d2..12bdcd3f20614 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -3,11 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file has been contributed to by a Generative AI */ package org.elasticsearch.xpack.inference.services.elser; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; @@ -22,15 +25,23 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -62,7 +73,7 @@ public boolean isInClusterService() { @Override public ElserMlNodeModel parseRequestConfig( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Set modelArchitectures @@ -70,16 +81,8 @@ public ElserMlNodeModel parseRequestConfig( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); var serviceSettingsBuilder = ElserMlNodeServiceSettings.fromMap(serviceSettingsMap); - // choose a default model version based on the cluster architecture if (serviceSettingsBuilder.getModelVariant() == null) { - boolean homogenous = modelArchitectures.size() == 1; - if (homogenous && modelArchitectures.iterator().next().equals("linux-x86_64")) { - // Use the hardware optimized model - serviceSettingsBuilder.setModelVariant(ELSER_V2_MODEL_LINUX_X86); - } else { - // default to the platform-agnostic model - serviceSettingsBuilder.setModelVariant(ELSER_V2_MODEL); - } + serviceSettingsBuilder.setModelVariant(selectDefaultModelVersionBasedOnClusterArchitecture(modelArchitectures)); } Map taskSettingsMap; @@ -96,21 +99,33 @@ public ElserMlNodeModel parseRequestConfig( throwIfNotEmptyMap(serviceSettingsMap, NAME); throwIfNotEmptyMap(taskSettingsMap, NAME); - return new ElserMlNodeModel(modelId, taskType, NAME, serviceSettingsBuilder.build(), taskSettings); + return new ElserMlNodeModel(inferenceEntityId, taskType, NAME, serviceSettingsBuilder.build(), taskSettings); + } + + private static String selectDefaultModelVersionBasedOnClusterArchitecture(Set modelArchitectures) { + // choose a default model version based on the cluster architecture + boolean homogenous = modelArchitectures.size() == 1; + if (homogenous && modelArchitectures.iterator().next().equals("linux-x86_64")) { + // Use the hardware optimized model + return ELSER_V2_MODEL_LINUX_X86; + } else { + // default to the platform-agnostic model + return ELSER_V2_MODEL; + } } 
@Override public ElserMlNodeModel parsePersistedConfigWithSecrets( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Map secrets ) { - return parsePersistedConfig(modelId, taskType, config); + return parsePersistedConfig(inferenceEntityId, taskType, config); } @Override - public ElserMlNodeModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { + public ElserMlNodeModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); var serviceSettingsBuilder = ElserMlNodeServiceSettings.fromMap(serviceSettingsMap); @@ -124,14 +139,16 @@ public ElserMlNodeModel parsePersistedConfig(String modelId, TaskType taskType, var taskSettings = taskSettingsFromMap(taskType, taskSettingsMap); - return new ElserMlNodeModel(modelId, taskType, NAME, serviceSettingsBuilder.build(), taskSettings); + return new ElserMlNodeModel(inferenceEntityId, taskType, NAME, serviceSettingsBuilder.build(), taskSettings); } @Override public void start(Model model, ActionListener listener) { if (model instanceof ElserMlNodeModel == false) { listener.onFailure( - new IllegalStateException("Error starting model, [" + model.getConfigurations().getModelId() + "] is not an elser model") + new IllegalStateException( + "Error starting model, [" + model.getConfigurations().getInferenceEntityId() + "] is not an elser model" + ) ); return; } @@ -148,16 +165,47 @@ public void start(Model model, ActionListener listener) { var startRequest = new StartTrainedModelDeploymentAction.Request( serviceSettings.getModelVariant(), - model.getConfigurations().getModelId() + model.getConfigurations().getInferenceEntityId() ); startRequest.setNumberOfAllocations(serviceSettings.getNumAllocations()); startRequest.setThreadsPerAllocation(serviceSettings.getNumThreads()); startRequest.setWaitForState(STARTED); + 
client.execute(StartTrainedModelDeploymentAction.INSTANCE, startRequest, elserNotDownloadedListener(model, listener)); + } + + private static ActionListener elserNotDownloadedListener( + Model model, + ActionListener listener + ) { + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + listener.onFailure( + new ResourceNotFoundException( + "Could not start the ELSER service as the ELSER model for this platform cannot be found." + + " ELSER needs to be downloaded before it can be started" + ) + ); + return; + } + listener.onFailure(e); + } + }; + } + + @Override + public void stop(String inferenceEntityId, ActionListener listener) { client.execute( - StartTrainedModelDeploymentAction.INSTANCE, - startRequest, - listener.delegateFailureAndWrap((l, r) -> l.onResponse(Boolean.TRUE)) + StopTrainedModelDeploymentAction.INSTANCE, + new StopTrainedModelDeploymentAction.Request(inferenceEntityId), + listener.delegateFailureAndWrap((delegatedResponseListener, response) -> delegatedResponseListener.onResponse(Boolean.TRUE)) ); } @@ -176,7 +224,7 @@ public void infer(Model model, List input, Map taskSetti } var request = InferTrainedModelDeploymentAction.Request.forTextInput( - model.getConfigurations().getModelId(), + model.getConfigurations().getInferenceEntityId(), TextExpansionConfigUpdate.EMPTY_UPDATE, input, TimeValue.timeValueSeconds(10) // TODO get timeout from request @@ -188,6 +236,33 @@ public void infer(Model model, List input, Map taskSetti ); } + @Override + public void putModel(Model model, ActionListener listener) { + if (model instanceof ElserMlNodeModel == false) { + listener.onFailure( + new IllegalStateException( + "Error starting model, [" + model.getConfigurations().getInferenceEntityId() + "] is not an elser 
model" + ) + ); + return; + } else { + String modelVariant = ((ElserMlNodeModel) model).getServiceSettings().getModelVariant(); + var fieldNames = List.of(); + var input = new TrainedModelInput(fieldNames); + var config = TrainedModelConfig.builder().setInput(input).setModelId(modelVariant).build(); + PutTrainedModelAction.Request putRequest = new PutTrainedModelAction.Request(config, false, true); + executeAsyncWithOrigin( + client, + INFERENCE_ORIGIN, + PutTrainedModelAction.INSTANCE, + putRequest, + listener.delegateFailure((l, r) -> { + l.onResponse(Boolean.TRUE); + }) + ); + } + } + private static ElserMlNodeTaskSettings taskSettingsFromMap(TaskType taskType, Map config) { if (taskType != TaskType.SPARSE_EMBEDDING) { throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java index 2ea7b080d059d..91edf4a2de09c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java @@ -98,8 +98,7 @@ static boolean transportVersionIsCompatibleWithElserModelVersion(TransportVersio if (transportVersion.onOrAfter(TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED)) { return true; } else { - return transportVersion.onOrAfter(TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED_PATCH) - && transportVersion.before(nextNonPatchVersion); + return transportVersion.onOrAfter(TransportVersions.V_8_11_X) && transportVersion.before(nextNonPatchVersion); } } @@ -132,7 +131,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return 
TransportVersions.V_8_500_074; + return TransportVersions.V_8_11_X; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java index c494cab08d8ae..9b9f6e41113e5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java @@ -41,7 +41,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_074; + return TransportVersions.V_8_11_X; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java index a7dc26b8472d1..ef93cdd57b756 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -36,7 +36,7 @@ public HuggingFaceBaseService(SetOnce factory, SetOnce @Override public HuggingFaceModel parseRequestConfig( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Set platformArchitectures @@ -44,7 +44,7 @@ public HuggingFaceModel parseRequestConfig( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); var model = createModel( - modelId, + inferenceEntityId, taskType, serviceSettingsMap, serviceSettingsMap, @@ -59,7 +59,7 @@ public HuggingFaceModel parseRequestConfig( @Override public HuggingFaceModel parsePersistedConfigWithSecrets( 
- String modelId, + String inferenceEntityId, TaskType taskType, Map config, Map secrets @@ -67,18 +67,24 @@ public HuggingFaceModel parsePersistedConfigWithSecrets( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); - return createModel(modelId, taskType, serviceSettingsMap, secretSettingsMap, parsePersistedConfigErrorMsg(modelId, name())); + return createModel( + inferenceEntityId, + taskType, + serviceSettingsMap, + secretSettingsMap, + parsePersistedConfigErrorMsg(inferenceEntityId, name()) + ); } @Override - public HuggingFaceModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { + public HuggingFaceModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - return createModel(modelId, taskType, serviceSettingsMap, null, parsePersistedConfigErrorMsg(modelId, name())); + return createModel(inferenceEntityId, taskType, serviceSettingsMap, null, parsePersistedConfigErrorMsg(inferenceEntityId, name())); } protected abstract HuggingFaceModel createModel( - String modelId, + String inferenceEntityId, TaskType taskType, Map serviceSettings, Map secretSettings, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index f1f177bb6bac3..401c3754759a7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -33,15 +33,15 @@ public HuggingFaceService(SetOnce factory, SetOnce serviceSettings, @Nullable 
Map secretSettings, String failureMessage ) { return switch (taskType) { - case TEXT_EMBEDDING -> new HuggingFaceEmbeddingsModel(modelId, taskType, NAME, serviceSettings, secretSettings); - case SPARSE_EMBEDDING -> new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + case TEXT_EMBEDDING -> new HuggingFaceEmbeddingsModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings); + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index 6464ca0e0fda8..b3b130b22a1fa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -52,9 +52,7 @@ public static HuggingFaceServiceSettings fromMap(Map map) { public static URI extractUri(Map map, String fieldName, ValidationException validationException) { String parsedUrl = extractRequiredString(map, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); - if (parsedUrl == null) { - return null; - } + return convertToUri(parsedUrl, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java index 4d784f96b0205..2968566208624 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java @@ -21,14 +21,14 @@ public class HuggingFaceElserModel extends HuggingFaceModel { public HuggingFaceElserModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, Map serviceSettings, @Nullable Map secrets ) { this( - modelId, + inferenceEntityId, taskType, service, HuggingFaceElserServiceSettings.fromMap(serviceSettings), @@ -37,13 +37,13 @@ public HuggingFaceElserModel( } HuggingFaceElserModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, HuggingFaceElserServiceSettings serviceSettings, @Nullable HuggingFaceElserSecretSettings secretSettings ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secretSettings)); + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings), new ModelSecrets(secretSettings)); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index c06b6a62db29a..579745ea7f4fe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -35,14 +35,14 @@ public String name() { @Override protected HuggingFaceModel createModel( - String modelId, + String inferenceEntityId, TaskType taskType, Map serviceSettings, @Nullable Map secretSettings, String failureMessage ) { return switch (taskType) { - case SPARSE_EMBEDDING -> 
new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java index 36d469fd05056..351173de95cc7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -23,28 +23,40 @@ public class HuggingFaceEmbeddingsModel extends HuggingFaceModel { public HuggingFaceEmbeddingsModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, Map serviceSettings, @Nullable Map secrets ) { - this(modelId, taskType, service, HuggingFaceServiceSettings.fromMap(serviceSettings), DefaultSecretSettings.fromMap(secrets)); + this( + inferenceEntityId, + taskType, + service, + HuggingFaceServiceSettings.fromMap(serviceSettings), + DefaultSecretSettings.fromMap(secrets) + ); } // Should only be used directly for testing HuggingFaceEmbeddingsModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, HuggingFaceServiceSettings serviceSettings, @Nullable DefaultSecretSettings secrets ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secrets)); + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings), new ModelSecrets(secrets)); } public HuggingFaceEmbeddingsModel(HuggingFaceEmbeddingsModel model, 
HuggingFaceServiceSettings serviceSettings) { - this(model.getModelId(), model.getTaskType(), model.getConfigurations().getService(), serviceSettings, model.getSecretSettings()); + this( + model.getInferenceEntityId(), + model.getTaskType(), + model.getConfigurations().getService(), + serviceSettings, + model.getSecretSettings() + ); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java index 97823e3bc9079..1e158725f531d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java @@ -10,6 +10,8 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionVisitor; @@ -21,5 +23,13 @@ public OpenAiModel(ModelConfigurations configurations, ModelSecrets secrets) { super(configurations, secrets); } + protected OpenAiModel(OpenAiModel model, TaskSettings taskSettings) { + super(model, taskSettings); + } + + protected OpenAiModel(OpenAiModel model, ServiceSettings serviceSettings) { + super(model, serviceSettings); + } + public abstract ExecutableAction accept(OpenAiActionVisitor creator, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 1bdd1abce0b45..9b5283ef4f803 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -50,7 +50,7 @@ public String name() { @Override public OpenAiModel parseRequestConfig( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Set platformArchitectures @@ -59,7 +59,7 @@ public OpenAiModel parseRequestConfig( Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); OpenAiModel model = createModel( - modelId, + inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, @@ -75,7 +75,7 @@ public OpenAiModel parseRequestConfig( } private static OpenAiModel createModel( - String modelId, + String inferenceEntityId, TaskType taskType, Map serviceSettings, Map taskSettings, @@ -83,14 +83,21 @@ private static OpenAiModel createModel( String failureMessage ) { return switch (taskType) { - case TEXT_EMBEDDING -> new OpenAiEmbeddingsModel(modelId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + case TEXT_EMBEDDING -> new OpenAiEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } @Override public OpenAiModel parsePersistedConfigWithSecrets( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Map secrets @@ -100,21 +107,28 @@ public OpenAiModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); return createModel( - modelId, + inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, secretSettingsMap, - parsePersistedConfigErrorMsg(modelId, NAME) + parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); } @Override - public OpenAiModel parsePersistedConfig(String modelId, TaskType taskType, Map 
config) { + public OpenAiModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); - return createModel(modelId, taskType, serviceSettingsMap, taskSettingsMap, null, parsePersistedConfigErrorMsg(modelId, NAME)); + return createModel( + inferenceEntityId, + taskType, + serviceSettingsMap, + taskSettingsMap, + null, + parsePersistedConfigErrorMsg(inferenceEntityId, NAME) + ); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java index 5ade2aad0acb4..4e96ac73157ad 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java @@ -28,7 +28,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createOptionalUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; @@ -50,17 +50,6 @@ public static OpenAiServiceSettings fromMap(Map map) { SimilarityMeasure similarity = extractSimilarity(map, 
ModelConfigurations.SERVICE_SETTINGS, validationException); Integer dims = removeAsType(map, DIMENSIONS, Integer.class); Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); - - // Throw if any of the settings were empty strings or invalid - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - // the url is optional and only for testing - if (url == null) { - return new OpenAiServiceSettings((URI) null, organizationId, similarity, dims, maxInputTokens); - } - URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); if (validationException.validationErrors().isEmpty() == false) { @@ -100,14 +89,6 @@ public OpenAiServiceSettings( this(createOptionalUri(uri), organizationId, similarity, dimensions, maxInputTokens); } - private static URI createOptionalUri(String url) { - if (url == null) { - return null; - } - - return createUri(url); - } - public OpenAiServiceSettings(StreamInput in) throws IOException { uri = createOptionalUri(in.readOptionalString()); organizationId = in.readOptionalString(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 250837d895590..98b0161665d8e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -22,7 +22,7 @@ public class OpenAiEmbeddingsModel extends OpenAiModel { public OpenAiEmbeddingsModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, Map serviceSettings, @@ -30,7 +30,7 @@ public OpenAiEmbeddingsModel( @Nullable Map secrets ) { this( - modelId, + 
inferenceEntityId, taskType, service, OpenAiServiceSettings.fromMap(serviceSettings), @@ -41,40 +41,22 @@ public OpenAiEmbeddingsModel( // Should only be used directly for testing OpenAiEmbeddingsModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, OpenAiServiceSettings serviceSettings, OpenAiEmbeddingsTaskSettings taskSettings, @Nullable DefaultSecretSettings secrets ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); } private OpenAiEmbeddingsModel(OpenAiEmbeddingsModel originalModel, OpenAiEmbeddingsTaskSettings taskSettings) { - super( - new ModelConfigurations( - originalModel.getConfigurations().getModelId(), - originalModel.getConfigurations().getTaskType(), - originalModel.getConfigurations().getService(), - originalModel.getServiceSettings(), - taskSettings - ), - new ModelSecrets(originalModel.getSecretSettings()) - ); + super(originalModel, taskSettings); } public OpenAiEmbeddingsModel(OpenAiEmbeddingsModel originalModel, OpenAiServiceSettings serviceSettings) { - super( - new ModelConfigurations( - originalModel.getConfigurations().getModelId(), - originalModel.getConfigurations().getTaskType(), - originalModel.getConfigurations().getService(), - serviceSettings, - originalModel.getTaskSettings() - ), - new ModelSecrets(originalModel.getSecretSettings()) - ); + super(originalModel, serviceSettings); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java index 3adfcd29b0f7a..16c3564f14328 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java @@ -34,21 +34,21 @@ public static ModelConfigurations createRandomInstance() { public static ModelConfigurations mutateTestInstance(ModelConfigurations instance) { switch (randomIntBetween(0, 2)) { case 0 -> new ModelConfigurations( - instance.getModelId() + "foo", + instance.getInferenceEntityId() + "foo", instance.getTaskType(), instance.getService(), instance.getServiceSettings(), instance.getTaskSettings() ); case 1 -> new ModelConfigurations( - instance.getModelId(), + instance.getInferenceEntityId(), TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length], instance.getService(), instance.getServiceSettings(), instance.getTaskSettings() ); case 2 -> new ModelConfigurations( - instance.getModelId(), + instance.getInferenceEntityId(), instance.getTaskType(), instance.getService() + "bar", instance.getServiceSettings(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java index 6962ca67a8c17..ac7fc6ba56952 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java @@ -85,7 +85,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.INFERENCE_MODEL_SECRETS_ADDED; + return TransportVersions.V_8_11_X; } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java index 091c11a480c0d..93694f167259f 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java @@ -31,10 +31,10 @@ protected GetInferenceModelAction.Request createTestInstance() { @Override protected GetInferenceModelAction.Request mutateInstance(GetInferenceModelAction.Request instance) { return switch (randomIntBetween(0, 1)) { - case 0 -> new GetInferenceModelAction.Request(instance.getModelId() + "foo", instance.getTaskType()); + case 0 -> new GetInferenceModelAction.Request(instance.getInferenceEntityId() + "foo", instance.getTaskType()); case 1 -> { var nextTaskType = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; - yield new GetInferenceModelAction.Request(instance.getModelId(), nextTaskType); + yield new GetInferenceModelAction.Request(instance.getInferenceEntityId(), nextTaskType); } default -> throw new UnsupportedOperationException(); }; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java index ee7bfc96c1370..4f7ae9436418f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java @@ -82,7 +82,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc var nextTask = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; yield new InferenceAction.Request( nextTask, - instance.getModelId(), + instance.getInferenceEntityId(), instance.getInput(), instance.getTaskSettings(), instance.getInputType() @@ -90,7 +90,7 @@ protected InferenceAction.Request 
mutateInstance(InferenceAction.Request instanc } case 1 -> new InferenceAction.Request( instance.getTaskType(), - instance.getModelId() + "foo", + instance.getInferenceEntityId() + "foo", instance.getInput(), instance.getTaskSettings(), instance.getInputType() @@ -100,7 +100,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc changedInputs.add("bar"); yield new InferenceAction.Request( instance.getTaskType(), - instance.getModelId(), + instance.getInferenceEntityId(), changedInputs, instance.getTaskSettings(), instance.getInputType() @@ -116,7 +116,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc } yield new InferenceAction.Request( instance.getTaskType(), - instance.getModelId(), + instance.getInferenceEntityId(), instance.getInput(), taskSettings, instance.getInputType() @@ -126,7 +126,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc var nextInputType = InputType.values()[(instance.getInputType().ordinal() + 1) % InputType.values().length]; yield new InferenceAction.Request( instance.getTaskType(), - instance.getModelId(), + instance.getInferenceEntityId(), instance.getInput(), instance.getTaskSettings(), nextInputType diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java index bdbca6426b601..d69cc58dc0871 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java @@ -34,25 +34,25 @@ protected PutInferenceModelAction.Request mutateInstance(PutInferenceModelAction return switch (randomIntBetween(0, 3)) { case 0 -> new PutInferenceModelAction.Request( 
TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length].toString(), - instance.getModelId(), + instance.getInferenceEntityId(), instance.getContent(), instance.getContentType() ); case 1 -> new PutInferenceModelAction.Request( instance.getTaskType().toString(), - instance.getModelId() + "foo", + instance.getInferenceEntityId() + "foo", instance.getContent(), instance.getContentType() ); case 2 -> new PutInferenceModelAction.Request( instance.getTaskType().toString(), - instance.getModelId(), + instance.getInferenceEntityId(), randomBytesReference(instance.getContent().length() + 1), instance.getContentType() ); case 3 -> new PutInferenceModelAction.Request( instance.getTaskType().toString(), - instance.getModelId(), + instance.getInferenceEntityId(), instance.getContent(), XContentType.values()[(instance.getContentType().ordinal() + 1) % XContentType.values().length] ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java new file mode 100644 index 0000000000000..67a95265f093d --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.cohere; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests; +import org.hamcrest.MatcherAssert; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; +import static 
org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class CohereActionCreatorTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testCreate_CohereEmbeddingsModel() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START), + 1024, + 1024, + "model", + CohereEmbeddingType.FLOAT + ); + var actionCreator = new CohereActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = 
CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(InputType.SEARCH, CohereTruncation.END); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat(result.asMap(), is(buildExpectation(List.of(List.of(0.123F, -0.123F))))); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaType()) + ); + MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat( + requestMap, + is( + Map.of( + "texts", + List.of("abc"), + "model", + "model", + "input_type", + "search_query", + "embedding_types", + List.of("float"), + "truncate", + "end" + ) + ) + ); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java new file mode 100644 index 0000000000000..501d5a5e42bfe --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java @@ -0,0 +1,348 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.cohere; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.results.TextEmbeddingByteResultsTests; +import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import 
static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class CohereEmbeddingsActionTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new 
MockResponse().setResponseCode(200).setBody(responseJson)); + + var action = createAction( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START), + "model", + CohereEmbeddingType.FLOAT, + sender + ); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat(result.asMap(), is(buildExpectation(List.of(List.of(0.123F, -0.123F))))); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaType()) + ); + MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat( + requestMap, + is( + Map.of( + "texts", + List.of("abc"), + "model", + "model", + "input_type", + "search_document", + "embedding_types", + List.of("float"), + "truncate", + "start" + ) + ) + ); + } + } + + public void testExecute_ReturnsSuccessfulResponse_ForInt8ResponseType() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "int8": [ + [ + 0, + -1 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var action = createAction( + getUrl(webServer), + "secret", + new 
CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START), + "model", + CohereEmbeddingType.INT8, + sender + ); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat( + result.asMap(), + is(TextEmbeddingByteResultsTests.buildExpectation(List.of(List.of((byte) 0, (byte) -1)))) + ); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaType()) + ); + MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat( + requestMap, + is( + Map.of( + "texts", + List.of("abc"), + "model", + "model", + "input_type", + "search_document", + "embedding_types", + List.of("int8"), + "truncate", + "start" + ) + ) + ); + } + } + + public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOException { + try (var sender = mock(Sender.class)) { + var thrownException = expectThrows( + IllegalArgumentException.class, + () -> createAction("^^", "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender) + ); + MatcherAssert.assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + + var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> 
listener.actionGet(TIMEOUT)); + + MatcherAssert.assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(format("Failed to send Cohere embeddings request to [%s]", getUrl(webServer))) + ); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled_WhenUrlIsNull() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var action = createAction(null, "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + MatcherAssert.assertThat(thrownException.getMessage(), is("Failed to send Cohere embeddings request")); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); + + var action = 
createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(format("Failed to send Cohere embeddings request to [%s]", getUrl(webServer))) + ); + } + + public void testExecute_ThrowsExceptionWithNullUrl() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); + + var action = createAction(null, "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + MatcherAssert.assertThat(thrownException.getMessage(), is("Failed to send Cohere embeddings request")); + } + + private CohereEmbeddingsAction createAction( + String url, + String apiKey, + CohereEmbeddingsTaskSettings taskSettings, + @Nullable String modelName, + @Nullable CohereEmbeddingType embeddingType, + Sender sender + ) { + var model = CohereEmbeddingsModelTests.createModel(url, apiKey, taskSettings, 1024, 1024, modelName, embeddingType); + + return new CohereEmbeddingsAction(sender, model, createWithEmptySettings(threadPool)); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java index 40eec862150c7..25b05327a21b7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java @@ -39,7 +39,7 @@ public class HuggingFaceActionTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); - private static final String URl = "http://localhost:12345"; + private static final String URL = "http://localhost:12345"; private ThreadPool threadPool; @Before @@ -56,7 +56,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { var sender = mock(Sender.class); doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); - var action = createAction(URl, sender); + var action = createAction(URL, sender); PlainActionFuture listener = new PlainActionFuture<>(); action.execute(List.of("abc"), listener); @@ -77,28 +77,34 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled return Void.TYPE; }).when(sender).send(any(), any()); - var action = createAction(URl, sender); + var action = createAction(URL, sender, "inferenceEntityId"); PlainActionFuture listener = new PlainActionFuture<>(); action.execute(List.of("abc"), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is(format("Failed to send Hugging Face test action request to [%s]", URl))); + assertThat( + thrownException.getMessage(), + is(format("Failed to send Hugging Face test action request from inference entity id [%s]", "inferenceEntityId")) + ); } public void testExecute_ThrowsException() { var sender = mock(Sender.class); doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); - var action = createAction(URl, sender); + var action = createAction(URL, sender, "inferenceEntityId"); PlainActionFuture listener = new PlainActionFuture<>(); action.execute(List.of("abc"), listener); var thrownException = 
expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is(format("Failed to send Hugging Face test action request to [%s]", URl))); + assertThat( + thrownException.getMessage(), + is(format("Failed to send Hugging Face test action request from inference entity id [%s]", "inferenceEntityId")) + ); } private HuggingFaceAction createAction(String url, Sender sender) { @@ -112,4 +118,16 @@ private HuggingFaceAction createAction(String url, Sender sender) { "test action" ); } + + private HuggingFaceAction createAction(String url, Sender sender, String modelId) { + var model = createModel(url, "secret", modelId); + + return new HuggingFaceAction( + sender, + model, + new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY, TruncatorTests.createTruncator()), + new AlwaysRetryingResponseHandler("test", (result) -> null), + "test action" + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandlerTests.java new file mode 100644 index 0000000000000..31945d5a8b4fc --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandlerTests.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.cohere; + +import org.apache.http.Header; +import org.apache.http.HeaderElement; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.hamcrest.MatcherAssert; + +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class CohereResponseHandlerTests extends ESTestCase { + public void testCheckForFailureStatusCode_DoesNotThrowFor200() { + callCheckForFailureStatusCode(200, "id"); + } + + public void testCheckForFailureStatusCode_ThrowsFor503() { + var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(503, "id")); + assertFalse(exception.shouldRetry()); + MatcherAssert.assertThat( + exception.getCause().getMessage(), + containsString("Received a server error status code for request from inference entity id [id] status [503]") + ); + MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.BAD_REQUEST)); + } + + public void testCheckForFailureStatusCode_ThrowsFor429() { + var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(429, "id")); + assertTrue(exception.shouldRetry()); + MatcherAssert.assertThat( + exception.getCause().getMessage(), + containsString("Received a rate limit status code for request from 
inference entity id [id] status [429]") + ); + MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); + } + + public void testCheckForFailureStatusCode_ThrowsFor400() { + var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(400, "id")); + assertFalse(exception.shouldRetry()); + MatcherAssert.assertThat( + exception.getCause().getMessage(), + containsString("Received an unsuccessful status code for request from inference entity id [id] status [400]") + ); + MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.BAD_REQUEST)); + } + + public void testCheckForFailureStatusCode_ThrowsFor400_TextsTooLarge() { + var exception = expectThrows( + RetryException.class, + () -> callCheckForFailureStatusCode(400, "invalid request: total number of texts must be at most 96 - received 100", "id") + ); + assertFalse(exception.shouldRetry()); + MatcherAssert.assertThat( + exception.getCause().getMessage(), + containsString("Received a texts array too large response for request from inference entity id [id] status [400]") + ); + MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.BAD_REQUEST)); + } + + public void testCheckForFailureStatusCode_ThrowsFor401() { + var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(401, "inferenceEntityId")); + assertFalse(exception.shouldRetry()); + MatcherAssert.assertThat( + exception.getCause().getMessage(), + containsString( + "Received an authentication error status code for request from inference entity id [inferenceEntityId] status [401]" + ) + ); + MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.UNAUTHORIZED)); + } + + public void testCheckForFailureStatusCode_ThrowsFor300() { + var exception = expectThrows(RetryException.class, () -> 
callCheckForFailureStatusCode(300, "id")); + assertFalse(exception.shouldRetry()); + MatcherAssert.assertThat( + exception.getCause().getMessage(), + containsString("Unhandled redirection for request from inference entity id [id] status [300]") + ); + MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.MULTIPLE_CHOICES)); + } + + private static void callCheckForFailureStatusCode(int statusCode, String modelId) { + callCheckForFailureStatusCode(statusCode, null, modelId); + } + + private static void callCheckForFailureStatusCode(int statusCode, @Nullable String errorMessage, String modelId) { + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(statusCode); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + var header = mock(Header.class); + when(header.getElements()).thenReturn(new HeaderElement[] {}); + when(httpResponse.getFirstHeader(anyString())).thenReturn(header); + + String responseJson = Strings.format(""" + { + "message": "%s" + } + """, errorMessage); + + var mockRequest = mock(Request.class); + when(mockRequest.getInferenceEntityId()).thenReturn(modelId); + var httpResult = new HttpResult(httpResponse, errorMessage == null ? 
new byte[] {} : responseJson.getBytes(StandardCharsets.UTF_8)); + var handler = new CohereResponseHandler("", (request, result) -> null); + + handler.checkForFailureStatusCode(mockRequest, httpResult); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java index 8fbef260dfc50..cfdf2e8bb8bcd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java @@ -76,7 +76,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { assertThat(result.response().getStatusLine().getStatusCode(), equalTo(responseCode)); assertThat(new String(result.body(), StandardCharsets.UTF_8), is(body)); assertThat(webServer.requests(), hasSize(1)); - assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.getURI().getPath())); + assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.httpRequestBase().getURI().getPath())); assertThat(webServer.requests().get(0).getUri().getQuery(), equalTo(paramKey + "=" + paramValue)); assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java index a26e4433f842b..3fb6d14e66674 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java @@ -22,6 +22,7 @@ import 
org.apache.http.nio.reactor.IOReactorException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; @@ -31,6 +32,8 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; import org.junit.After; import org.junit.Before; @@ -41,7 +44,6 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterService; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; @@ -92,7 +94,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { assertThat(result.response().getStatusLine().getStatusCode(), equalTo(responseCode)); assertThat(new String(result.body(), StandardCharsets.UTF_8), is(body)); assertThat(webServer.requests(), hasSize(1)); - assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.getURI().getPath())); + assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.httpRequestBase().getURI().getPath())); assertThat(webServer.requests().get(0).getUri().getQuery(), equalTo(paramKey + "=" + paramValue)); assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); } @@ -103,7 +105,7 @@ public void testSend_ThrowsErrorIfCalledBeforeStart() throws Exception { PlainActionFuture listener = new 
PlainActionFuture<>(); var thrownException = expectThrows( AssertionError.class, - () -> httpClient.send(mock(HttpUriRequest.class), HttpClientContext.create(), listener) + () -> httpClient.send(HttpRequestTests.createMock("inferenceEntityId"), HttpClientContext.create(), listener) ); assertThat(thrownException.getMessage(), is("call start() before attempting to send a request")); @@ -152,7 +154,10 @@ public void testSend_CancelledCallsOnFailure() throws Exception { client.send(httpPost, HttpClientContext.create(), listener); var thrownException = expectThrows(CancellationException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is(format("Request [%s] was cancelled", httpPost.getRequestLine()))); + assertThat( + thrownException.getMessage(), + is(Strings.format("Request from inference entity id [%s] was cancelled", httpPost.inferenceEntityId())) + ); } } @@ -197,7 +202,7 @@ public void testSend_FailsWhenMaxBytesReadIsExceeded() throws Exception { } } - public static HttpPost createHttpPost(int port, String paramKey, String paramValue) throws URISyntaxException { + public static HttpRequest createHttpPost(int port, String paramKey, String paramValue) throws URISyntaxException { URI uri = new URIBuilder().setScheme("http") .setHost("localhost") .setPort(port) @@ -214,7 +219,7 @@ public static HttpPost createHttpPost(int port, String paramKey, String paramVal httpPost.setEntity(byteEntity); httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); - return httpPost; + return new HttpRequest(httpPost, "inferenceEntityId"); } public static PoolingNHttpClientConnectionManager createConnectionManager() throws IOReactorException { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java index affbd43958e29..801c7c15b9429 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java @@ -9,12 +9,13 @@ import org.apache.http.HttpResponse; import org.apache.http.StatusLine; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.request.Request; import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForFailureStatusCode; +import static org.elasticsearch.xpack.inference.external.request.RequestTests.mockRequest; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -32,10 +33,10 @@ public void testCheckForFailureStatusCode_ThrowsWhenStatusCodeIs300() { var thrownException = expectThrows( IllegalStateException.class, - () -> checkForFailureStatusCode(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result) + () -> checkForFailureStatusCode(mockThrottlerManager(), mock(Logger.class), mockRequest("id"), result) ); - assertThat(thrownException.getMessage(), is("Unhandled redirection for request [null] status [300]")); + assertThat(thrownException.getMessage(), is("Unhandled redirection for request from inference entity id [id] status [300]")); } public void testCheckForFailureStatusCode_DoesNotThrowWhenStatusCodeIs200() { @@ -47,7 +48,7 @@ public void testCheckForFailureStatusCode_DoesNotThrowWhenStatusCodeIs200() { var result = new HttpResult(httpResponse, new byte[0]); - checkForFailureStatusCode(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result); + checkForFailureStatusCode(mockThrottlerManager(), 
mock(Logger.class), mock(Request.class), result); } public void testCheckForEmptyBody_DoesNotThrowWhenTheBodyIsNotEmpty() { @@ -56,7 +57,7 @@ public void testCheckForEmptyBody_DoesNotThrowWhenTheBodyIsNotEmpty() { var result = new HttpResult(httpResponse, new byte[] { 'a' }); - checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result); + checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mock(Request.class), result); } public void testCheckForEmptyBody_ThrowsWhenTheBodyIsEmpty() { @@ -67,9 +68,9 @@ public void testCheckForEmptyBody_ThrowsWhenTheBodyIsEmpty() { var thrownException = expectThrows( IllegalStateException.class, - () -> checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result) + () -> checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mockRequest("id"), result) ); - assertThat(thrownException.getMessage(), is("Response body was empty for request [null]")); + assertThat(thrownException.getMessage(), is("Response body was empty for request from inference entity id [id]")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java index 8b47dade32872..8d60c2f5bfa48 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java @@ -10,7 +10,6 @@ import org.apache.http.ConnectionClosedException; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; 
@@ -22,6 +21,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.Before; @@ -542,7 +542,7 @@ private void executeTasks(Runnable runnable, int retries) { private static Request mockRequest() { var request = mock(Request.class); when(request.truncate()).thenReturn(request); - when(request.createRequest()).thenReturn(mock(HttpRequestBase.class)); + when(request.createHttpRequest()).thenReturn(HttpRequestTests.createMock("inferenceEntityId")); return request; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java index b9b847b5187cb..f25312260bfd0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.methods.HttpRequestBase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.support.PlainActionFuture; @@ -18,6 +17,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; import 
org.junit.After; import org.junit.Before; @@ -61,7 +61,7 @@ public void testQueueSize_IsEmpty() { public void testQueueSize_IsOne() { var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); - service.send(mock(HttpRequestBase.class), null, new PlainActionFuture<>()); + service.send(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); assertThat(service.queueSize(), is(1)); } @@ -114,7 +114,7 @@ public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { }); PlainActionFuture listener = new PlainActionFuture<>(); - service.send(mock(HttpRequestBase.class), null, listener); + service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); service.start(); @@ -134,7 +134,7 @@ public void testSend_AfterShutdown_Throws() { service.shutdown(); var listener = new PlainActionFuture(); - service.send(mock(HttpRequestBase.class), null, listener); + service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); @@ -147,9 +147,9 @@ public void testSend_AfterShutdown_Throws() { public void testSend_Throws_WhenQueueIsFull() { var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, 1, null); - service.send(mock(HttpRequestBase.class), null, new PlainActionFuture<>()); + service.send(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); var listener = new PlainActionFuture(); - service.send(mock(HttpRequestBase.class), null, listener); + service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); @@ -176,7 +176,10 @@ public void testTaskThrowsError_CallsOnFailure() throws Exception { service.start(); var thrownException = 
expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is(format("Failed to send request [%s]", request.getRequestLine()))); + assertThat( + thrownException.getMessage(), + is(format("Failed to send request from inference entity id [%s]", request.inferenceEntityId())) + ); assertThat(thrownException.getCause(), instanceOf(IllegalArgumentException.class)); assertTrue(service.isTerminated()); } @@ -197,7 +200,7 @@ public void testSend_CallsOnFailure_WhenRequestTimesOut() { var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); var listener = new PlainActionFuture(); - service.send(mock(HttpRequestBase.class), TimeValue.timeValueNanos(1), listener); + service.send(HttpRequestTests.createMock("inferenceEntityId"), TimeValue.timeValueNanos(1), listener); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -211,7 +214,7 @@ public void testSend_NotifiesTasksOfShutdown() { var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); var listener = new PlainActionFuture(); - service.send(mock(HttpRequestBase.class), null, listener); + service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); service.shutdown(); service.start(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java index c3bfbe2c294fc..6b085f8dd80a7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java @@ -8,7 +8,6 @@ package 
org.elasticsearch.xpack.inference.external.http.sender; import org.apache.http.HttpHeaders; -import org.apache.http.client.methods.HttpRequestBase; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; @@ -23,6 +22,7 @@ import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; @@ -95,7 +95,7 @@ public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception assertThat(result.response().getStatusLine().getStatusCode(), equalTo(responseCode)); assertThat(new String(result.body(), StandardCharsets.UTF_8), is(body)); assertThat(webServer.requests(), hasSize(1)); - assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.getURI().getPath())); + assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.httpRequestBase().getURI().getPath())); assertThat(webServer.requests().get(0).getUri().getQuery(), equalTo(paramKey + "=" + paramValue)); assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); } @@ -106,7 +106,10 @@ public void testHttpRequestSender_Throws_WhenCallingSendBeforeStart() throws Exc try (var sender = senderFactory.createSender("test_service")) { PlainActionFuture listener = new PlainActionFuture<>(); - var thrownException = expectThrows(AssertionError.class, () -> sender.send(mock(HttpRequestBase.class), listener)); + var thrownException = expectThrows( + AssertionError.class, + () -> sender.send(HttpRequestTests.createMock("inferenceEntityId"), listener) + ); assertThat(thrownException.getMessage(), 
is("call start() before sending a request")); } } @@ -125,7 +128,7 @@ public void testHttpRequestSender_Throws_WhenATimeoutOccurs() throws Exception { sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send(mock(HttpRequestBase.class), TimeValue.timeValueNanos(1), listener); + sender.send(HttpRequestTests.createMock("inferenceEntityId"), TimeValue.timeValueNanos(1), listener); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -146,7 +149,7 @@ public void testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send(mock(HttpRequestBase.class), TimeValue.timeValueNanos(1), listener); + sender.send(HttpRequestTests.createMock("id"), TimeValue.timeValueNanos(1), listener); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index ce470fa002824..eaf1a0ac267cf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -9,7 +9,6 @@ import org.apache.http.HttpHeaders; import org.apache.http.HttpResponse; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.protocol.HttpClientContext; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchTimeoutException; @@ -24,6 +23,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClient; import 
org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; @@ -92,7 +92,7 @@ public void testDoRun_SendsRequestAndReceivesResponse() throws Exception { assertThat(result.response().getStatusLine().getStatusCode(), equalTo(responseCode)); assertThat(new String(result.body(), StandardCharsets.UTF_8), is(body)); assertThat(webServer.requests(), hasSize(1)); - assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.getURI().getPath())); + assertThat(webServer.requests().get(0).getUri().getPath(), equalTo(httpPost.httpRequestBase().getURI().getPath())); assertThat(webServer.requests().get(0).getUri().getQuery(), equalTo(paramKey + "=" + paramValue)); assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); } @@ -111,7 +111,10 @@ public void testDoRun_SendThrowsIOException() throws Exception { requestTask.doRun(); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is(format("Failed to send request [%s]", httpPost.getRequestLine()))); + assertThat( + thrownException.getMessage(), + is(format("Failed to send request from inference entity id [%s]", httpPost.inferenceEntityId())) + ); } public void testRequest_DoesNotCallOnFailureForTimeout_AfterSendThrowsIllegalArgumentException() throws Exception { @@ -139,7 +142,10 @@ public void testRequest_DoesNotCallOnFailureForTimeout_AfterSendThrowsIllegalArg ArgumentCaptor argument = ArgumentCaptor.forClass(Exception.class); verify(listener, times(1)).onFailure(argument.capture()); - assertThat(argument.getValue().getMessage(), is(format("Failed to send request [%s]", httpPost.getRequestLine()))); + assertThat( + argument.getValue().getMessage(), + is(format("Failed to 
send request from inference entity id [%s]", httpPost.inferenceEntityId())) + ); assertThat(argument.getValue(), instanceOf(ElasticsearchException.class)); assertThat(argument.getValue().getCause(), instanceOf(IllegalArgumentException.class)); @@ -152,7 +158,7 @@ public void testRequest_ReturnsTimeoutException() { PlainActionFuture listener = new PlainActionFuture<>(); var requestTask = new RequestTask( - mock(HttpRequestBase.class), + HttpRequestTests.createMock("inferenceEntityId"), httpClient, HttpClientContext.create(), TimeValue.timeValueMillis(1), @@ -186,7 +192,7 @@ public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exceptio }).when(listener).onFailure(any()); var requestTask = new RequestTask( - mock(HttpRequestBase.class), + HttpRequestTests.createMock("inferenceEntityId"), httpClient, HttpClientContext.create(), TimeValue.timeValueMillis(1), @@ -226,7 +232,7 @@ public void testRequest_DoesNotCallOnResponseAfterTimingOut() throws Exception { }).when(listener).onFailure(any()); var requestTask = new RequestTask( - mock(HttpRequestBase.class), + HttpRequestTests.createMock("inferenceEntityId"), httpClient, HttpClientContext.create(), TimeValue.timeValueMillis(1), @@ -263,7 +269,7 @@ public void testRequest_DoesNotCallOnFailureForTimeout_AfterAlreadyCallingOnFail ActionListener listener = mock(ActionListener.class); var requestTask = new RequestTask( - mock(HttpRequestBase.class), + HttpRequestTests.createMock("inferenceEntityId"), httpClient, HttpClientContext.create(), TimeValue.timeValueMillis(1), @@ -297,7 +303,7 @@ public void testRequest_DoesNotCallOnFailureForTimeout_AfterAlreadyCallingOnResp ActionListener listener = mock(ActionListener.class); var requestTask = new RequestTask( - mock(HttpRequestBase.class), + HttpRequestTests.createMock("inferenceEntityId"), httpClient, HttpClientContext.create(), TimeValue.timeValueMillis(1), diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java index f4642d0bf48c2..6abddf03fef39 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java @@ -9,13 +9,13 @@ import org.apache.http.HttpResponse; import org.apache.http.StatusLine; -import org.apache.http.client.methods.HttpRequestBase; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.RequestTests; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.core.Is.is; @@ -30,7 +30,7 @@ public void testCheckForFailureStatusCode() { var httpResponse = mock(HttpResponse.class); when(httpResponse.getStatusLine()).thenReturn(statusLine); - var httpRequest = mock(HttpRequestBase.class); + var mockRequest = RequestTests.mockRequest("id"); var httpResult = new HttpResult(httpResponse, new byte[] {}); @@ -38,62 +38,65 @@ public void testCheckForFailureStatusCode() { // 200 ok when(statusLine.getStatusCode()).thenReturn(200); - handler.checkForFailureStatusCode(httpRequest, httpResult); + handler.checkForFailureStatusCode(mockRequest, httpResult); // 503 when(statusLine.getStatusCode()).thenReturn(503); - var retryException = expectThrows(RetryException.class, () -> 
handler.checkForFailureStatusCode(httpRequest, httpResult)); + var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertTrue(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received a rate limit status code for request [null] status [503]") + containsString("Received a rate limit status code for request from inference entity id [id] status [503]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); // 502 when(statusLine.getStatusCode()).thenReturn(502); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertTrue(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received a rate limit status code for request [null] status [502]") + containsString("Received a rate limit status code for request from inference entity id [id] status [502]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); // 429 when(statusLine.getStatusCode()).thenReturn(429); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertTrue(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received a rate limit status code for request [null] status [429]") + containsString("Received a rate limit status code for request from inference entity id [id] status [429]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); // 413 
when(statusLine.getStatusCode()).thenReturn(413); - retryException = expectThrows(ContentTooLargeException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(ContentTooLargeException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertTrue(retryException.shouldRetry()); assertThat(retryException.getCause().getMessage(), containsString("Received a content too large status code")); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.REQUEST_ENTITY_TOO_LARGE)); // 401 when(statusLine.getStatusCode()).thenReturn(401); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received an authentication error status code for request [null] status [401]") + containsString("Received an authentication error status code for request from inference entity id [id] status [401]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.UNAUTHORIZED)); // 300 when(statusLine.getStatusCode()).thenReturn(300); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); - assertThat(retryException.getCause().getMessage(), containsString("Unhandled redirection for request [null] status [300]")); + assertThat( + retryException.getCause().getMessage(), + containsString("Unhandled redirection for request from inference entity id [id] status [300]") + ); assertThat(((ElasticsearchStatusException) 
retryException.getCause()).status(), is(RestStatus.MULTIPLE_CHOICES)); // 402 when(statusLine.getStatusCode()).thenReturn(402); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received an unsuccessful status code for request [null] status [402]") + containsString("Received an unsuccessful status code for request from inference entity id [id] status [402]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java index e3d1c19b0452b..bda709017f266 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java @@ -11,7 +11,6 @@ import org.apache.http.HeaderElement; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; @@ -20,6 +19,7 @@ import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.RequestTests; import 
java.nio.charset.StandardCharsets; @@ -40,85 +40,106 @@ public void testCheckForFailureStatusCode() { when(header.getElements()).thenReturn(new HeaderElement[] {}); when(httpResponse.getFirstHeader(anyString())).thenReturn(header); - var httpRequest = mock(HttpRequestBase.class); + var mockRequest = RequestTests.mockRequest("id"); var httpResult = new HttpResult(httpResponse, new byte[] {}); var handler = new OpenAiResponseHandler("", (request, result) -> null); // 200 ok when(statusLine.getStatusCode()).thenReturn(200); - handler.checkForFailureStatusCode(httpRequest, httpResult); + handler.checkForFailureStatusCode(mockRequest, httpResult); // 503 when(statusLine.getStatusCode()).thenReturn(503); - var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a server busy error status code for request from inference entity id [id] status [503]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 501 + when(statusLine.getStatusCode()).thenReturn(501); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received a server error status code for request [null] status [503]") + containsString("Received a server error status code for request from inference entity id [id] status [501]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 500 + when(statusLine.getStatusCode()).thenReturn(500); + retryException = expectThrows(RetryException.class, () 
-> handler.checkForFailureStatusCode(mockRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a server error status code for request from inference entity id [id] status [500]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); // 429 when(statusLine.getStatusCode()).thenReturn(429); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertTrue(retryException.shouldRetry()); assertThat(retryException.getCause().getMessage(), containsString("Received a rate limit status code. Token limit")); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); // 413 when(statusLine.getStatusCode()).thenReturn(413); - retryException = expectThrows(ContentTooLargeException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(ContentTooLargeException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertTrue(retryException.shouldRetry()); assertThat(retryException.getCause().getMessage(), containsString("Received a content too large status code")); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.REQUEST_ENTITY_TOO_LARGE)); // 400 content too large retryException = expectThrows( ContentTooLargeException.class, - () -> handler.checkForFailureStatusCode(httpRequest, createContentTooLargeResult(400)) + () -> handler.checkForFailureStatusCode(mockRequest, createContentTooLargeResult(400)) ); assertTrue(retryException.shouldRetry()); assertThat(retryException.getCause().getMessage(), containsString("Received a content too large status code")); 
assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); // 400 generic bad request should not be marked as a content too large when(statusLine.getStatusCode()).thenReturn(400); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received an unsuccessful status code for request [null] status [400]") + containsString("Received an unsuccessful status code for request from inference entity id [id] status [400]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); // 400 is not flagged as a content too large when the error message is different when(statusLine.getStatusCode()).thenReturn(400); retryException = expectThrows( RetryException.class, - () -> handler.checkForFailureStatusCode(httpRequest, createResult(400, "blah")) + () -> handler.checkForFailureStatusCode(mockRequest, createResult(400, "blah")) ); assertFalse(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received an unsuccessful status code for request [null] status [400]") + containsString("Received an unsuccessful status code for request from inference entity id [id] status [400]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); // 401 when(statusLine.getStatusCode()).thenReturn(401); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); assertThat( 
retryException.getCause().getMessage(), - containsString("Received an authentication error status code for request [null] status [401]") + containsString("Received an authentication error status code for request from inference entity id [id] status [401]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.UNAUTHORIZED)); // 300 when(statusLine.getStatusCode()).thenReturn(300); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); - assertThat(retryException.getCause().getMessage(), containsString("Unhandled redirection for request [null] status [300]")); + assertThat( + retryException.getCause().getMessage(), + containsString("Unhandled redirection for request from inference entity id [id] status [300]") + ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.MULTIPLE_CHOICES)); // 402 when(statusLine.getStatusCode()).thenReturn(402); - retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); assertFalse(retryException.shouldRetry()); assertThat( retryException.getCause().getMessage(), - containsString("Received an unsuccessful status code for request [null] status [402]") + containsString("Received an unsuccessful status code for request from inference entity id [id] status [402]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/HttpRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/HttpRequestTests.java new file mode 100644 index 0000000000000..d5c2b6969fce3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/HttpRequestTests.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request; + +import org.apache.http.client.methods.HttpRequestBase; + +import static org.mockito.Mockito.mock; + +public class HttpRequestTests { + public static HttpRequest createMock(String modelId) { + return new HttpRequest(mock(HttpRequestBase.class), modelId); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/RequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/RequestTests.java new file mode 100644 index 0000000000000..d6c4db2294253 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/RequestTests.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class RequestTests { + public static Request mockRequest(String modelId) { + var request = mock(Request.class); + when(request.getInferenceEntityId()).thenReturn(modelId); + + return request; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java new file mode 100644 index 0000000000000..8ef9ea4b0316b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class CohereEmbeddingsRequestEntityTests extends ESTestCase { + public void testXContent_WritesAllFields_WhenTheyAreDefined() throws IOException { + var entity = new CohereEmbeddingsRequestEntity( + List.of("abc"), + new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START), + "model", + CohereEmbeddingType.FLOAT + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + MatcherAssert.assertThat(xContentResult, is(""" + {"texts":["abc"],"model":"model","input_type":"search_document","embedding_types":["float"],"truncate":"start"}""")); + } + + public void testXContent_InputTypeSearch_EmbeddingTypesInt8_TruncateNone() throws IOException { + var entity = new CohereEmbeddingsRequestEntity( + List.of("abc"), + new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE), + "model", + CohereEmbeddingType.INT8 + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + MatcherAssert.assertThat(xContentResult, is(""" + 
{"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["int8"],"truncate":"none"}""")); + } + + public void testXContent_WritesNoOptionalFields_WhenTheyAreNotDefined() throws IOException { + var entity = new CohereEmbeddingsRequestEntity(List.of("abc"), CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + MatcherAssert.assertThat(xContentResult, is(""" + {"texts":["abc"]}""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java new file mode 100644 index 0000000000000..df61417ffff9c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java @@ -0,0 +1,169 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class CohereEmbeddingsRequestTests extends ESTestCase { + public void testCreateRequest_UrlDefined() throws URISyntaxException, IOException { + var request = createRequest( + List.of("abc"), + CohereEmbeddingsModelTests.createModel("url", "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, null, null) + ); + + var httpRequest = request.createHttpRequest(); + MatcherAssert.assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + + var 
requestMap = entityAsMap(httpPost.getEntity().getContent()); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc")))); + } + + public void testCreateRequest_AllOptionsDefined() throws URISyntaxException, IOException { + var request = createRequest( + List.of("abc"), + CohereEmbeddingsModelTests.createModel( + "url", + "secret", + new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ) + ); + + var httpRequest = request.createHttpRequest(); + MatcherAssert.assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + MatcherAssert.assertThat( + requestMap, + is( + Map.of( + "texts", + List.of("abc"), + "model", + "model", + "input_type", + "search_document", + "embedding_types", + List.of("float"), + "truncate", + "start" + ) + ) + ); + } + + public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() throws URISyntaxException, IOException { + var request = createRequest( + List.of("abc"), + CohereEmbeddingsModelTests.createModel( + "url", + "secret", + new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.END), + null, + null, + "model", + CohereEmbeddingType.INT8 + ) + ); + + var httpRequest = request.createHttpRequest(); + MatcherAssert.assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); + 
MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + MatcherAssert.assertThat( + requestMap, + is( + Map.of( + "texts", + List.of("abc"), + "model", + "model", + "input_type", + "search_query", + "embedding_types", + List.of("int8"), + "truncate", + "end" + ) + ) + ); + } + + public void testCreateRequest_TruncateNone() throws URISyntaxException, IOException { + var request = createRequest( + List.of("abc"), + CohereEmbeddingsModelTests.createModel( + "url", + "secret", + new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE), + null, + null, + null, + null + ) + ); + + var httpRequest = request.createHttpRequest(); + MatcherAssert.assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "truncate", "none"))); + } + + public static CohereEmbeddingsRequest createRequest(List input, CohereEmbeddingsModel model) throws URISyntaxException { + var account = new CohereAccount(model.getServiceSettings().getCommonSettings().getUri(), model.getSecretSettings().apiKey()); + return new CohereEmbeddingsRequest(account, input, model); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java index 17124b6ea6d7a..469ea28d42e5d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModelTests; import java.io.IOException; import java.net.URI; @@ -30,10 +31,10 @@ public class HuggingFaceInferenceRequestTests extends ESTestCase { @SuppressWarnings("unchecked") public void testCreateRequest() throws URISyntaxException, IOException { var huggingFaceRequest = createRequest("www.google.com", "secret", "abc"); - var httpRequest = huggingFaceRequest.createRequest(); + var httpRequest = huggingFaceRequest.createHttpRequest(); - assertThat(httpRequest, instanceOf(HttpPost.class)); - var httpPost = (HttpPost) httpRequest; + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); assertThat(httpPost.getURI().toString(), is("www.google.com")); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); @@ -51,10 +52,10 @@ public void testTruncate_ReducesInputTextSizeByHalf() throws URISyntaxException, var truncatedRequest = huggingFaceRequest.truncate(); assertThat(truncatedRequest.getURI().toString(), is(new URI("www.google.com").toString())); - var httpRequest = truncatedRequest.createRequest(); - assertThat(httpRequest, 
instanceOf(HttpPost.class)); + var httpRequest = truncatedRequest.createHttpRequest(); + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); - var httpPost = (HttpPost) httpRequest; + var httpPost = (HttpPost) httpRequest.httpRequestBase(); var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap.get("inputs"), instanceOf(List.class)); assertThat(requestMap.get("inputs"), is(List.of("ab"))); @@ -74,7 +75,8 @@ public static HuggingFaceInferenceRequest createRequest(String url, String apiKe return new HuggingFaceInferenceRequest( TruncatorTests.createTruncator(), account, - new Truncator.TruncationResult(List.of(input), new boolean[] { false }) + new Truncator.TruncationResult(List.of(input), new boolean[] { false }), + HuggingFaceEmbeddingsModelTests.createModel(url, apiKey) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java index cbbd83c896d28..4c4c40e9c1056 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java @@ -9,17 +9,15 @@ import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; -import 
org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings; +import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests; import java.io.IOException; -import java.net.URI; import java.net.URISyntaxException; import java.util.List; @@ -33,10 +31,10 @@ public class OpenAiEmbeddingsRequestTests extends ESTestCase { public void testCreateRequest_WithUrlOrganizationUserDefined() throws URISyntaxException, IOException { var request = createRequest("www.google.com", "org", "secret", "abc", "model", "user"); - var httpRequest = request.createRequest(); + var httpRequest = request.createHttpRequest(); - assertThat(httpRequest, instanceOf(HttpPost.class)); - var httpPost = (HttpPost) httpRequest; + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); assertThat(httpPost.getURI().toString(), is("www.google.com")); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); @@ -52,10 +50,10 @@ public void testCreateRequest_WithUrlOrganizationUserDefined() throws URISyntaxE public void testCreateRequest_WithDefaultUrl() throws URISyntaxException, IOException { var request = createRequest(null, "org", "secret", "abc", "model", "user"); - var httpRequest = request.createRequest(); + var httpRequest = request.createHttpRequest(); - assertThat(httpRequest, instanceOf(HttpPost.class)); - var httpPost = (HttpPost) httpRequest; + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); assertThat(httpPost.getURI().toString(), is(buildDefaultUri().toString())); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); @@ -71,10 +69,10 @@ public void testCreateRequest_WithDefaultUrl() throws URISyntaxException, IOExce public void 
testCreateRequest_WithDefaultUrlAndWithoutUserOrganization() throws URISyntaxException, IOException { var request = createRequest(null, null, "secret", "abc", "model", null); - var httpRequest = request.createRequest(); + var httpRequest = request.createHttpRequest(); - assertThat(httpRequest, instanceOf(HttpPost.class)); - var httpPost = (HttpPost) httpRequest; + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); assertThat(httpPost.getURI().toString(), is(buildDefaultUri().toString())); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); @@ -92,17 +90,17 @@ public void testTruncate_ReducesInputTextSizeByHalf() throws URISyntaxException, var truncatedRequest = request.truncate(); assertThat(request.getURI().toString(), is(buildDefaultUri().toString())); - var httpRequest = truncatedRequest.createRequest(); - assertThat(httpRequest, instanceOf(HttpPost.class)); + var httpRequest = truncatedRequest.createHttpRequest(); + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); - var httpPost = (HttpPost) httpRequest; + var httpPost = (HttpPost) httpRequest.httpRequestBase(); var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap, aMapWithSize(2)); assertThat(requestMap.get("input"), is(List.of("ab"))); assertThat(requestMap.get("model"), is("model")); } - public void testIsTruncated_ReturnsTrue() throws URISyntaxException, IOException { + public void testIsTruncated_ReturnsTrue() { var request = createRequest(null, null, "secret", "abcd", "model", null); assertFalse(request.getTruncationInfo()[0]); @@ -117,16 +115,15 @@ public static OpenAiEmbeddingsRequest createRequest( String input, String model, @Nullable String user - ) throws URISyntaxException { - var uri = url == null ? 
null : new URI(url); - - var account = new OpenAiAccount(uri, org, new SecureString(apiKey.toCharArray())); + ) { + var embeddingsModel = OpenAiEmbeddingsModelTests.createModel(url, org, apiKey, model, user, null); + var account = new OpenAiAccount(embeddingsModel.getServiceSettings().uri(), org, embeddingsModel.getSecretSettings().apiKey()); return new OpenAiEmbeddingsRequest( TruncatorTests.createTruncator(), account, new Truncator.TruncationResult(List.of(input), new boolean[] { false }), - new OpenAiEmbeddingsTaskSettings(model, user) + embeddingsModel ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java new file mode 100644 index 0000000000000..f04715be0838f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java @@ -0,0 +1,476 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.cohere; + +import org.apache.http.HttpResponse; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class CohereEmbeddingsResponseEntityTests extends ESTestCase { + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": [ + [ + -0.0018434525, + 0.01777649 + ] + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + InferenceServiceResults parsedResults = CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(TextEmbeddingResults.class)); + MatcherAssert.assertThat( + ((TextEmbeddingResults) parsedResults).embeddings(), + is(List.of(new TextEmbeddingResults.Embedding(List.of(-0.0018434525F, 0.01777649F)))) + ); + } + + public void testFromResponse_CreatesResultsForASingleItem_ObjectFormat() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + -0.0018434525, + 
0.01777649 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat( + parsedResults.embeddings(), + is(List.of(new TextEmbeddingResults.Embedding(List.of(-0.0018434525F, 0.01777649F)))) + ); + } + + public void testFromResponse_UsesTheFirstValidEmbeddingsEntry() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + -0.0018434525, + 0.01777649 + ] + ], + "int8": [ + [ + -1, + 0 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat( + parsedResults.embeddings(), + is(List.of(new TextEmbeddingResults.Embedding(List.of(-0.0018434525F, 0.01777649F)))) + ); + } + + public void testFromResponse_UsesTheFirstValidEmbeddingsEntryInt8_WithInvalidFirst() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "invalid_type": [ + [ + -0.0018434525, + 0.01777649 + ] + ], + "int8": [ + [ + -1, + 0 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + TextEmbeddingByteResults parsedResults = (TextEmbeddingByteResults) 
CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat( + parsedResults.embeddings(), + is(List.of(new TextEmbeddingByteResults.Embedding(List.of((byte) -1, (byte) 0)))) + ); + } + + public void testFromResponse_ParsesBytes() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "int8": [ + [ + -1, + 0 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + TextEmbeddingByteResults parsedResults = (TextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat( + parsedResults.embeddings(), + is(List.of(new TextEmbeddingByteResults.Embedding(List.of((byte) -1, (byte) 0)))) + ); + } + + public void testFromResponse_CreatesResultsForMultipleItems() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": [ + [ + -0.0018434525, + 0.01777649 + ], + [ + -0.123, + 0.123 + ] + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(-0.0018434525F, 0.01777649F)), + new TextEmbeddingResults.Embedding(List.of(-0.123F, 0.123F)) + ) + ) + ); + } + + public void 
testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + -0.0018434525, + 0.01777649 + ], + [ + -0.123, + 0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(-0.0018434525F, 0.01777649F)), + new TextEmbeddingResults.Embedding(List.of(-0.123F, 0.123F)) + ) + ) + ); + } + + public void testFromResponse_FailsWhenEmbeddingsFieldIsNotPresent() { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings_not_here": [ + [ + -0.0018434525, + 0.01777649 + ] + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Failed to find required field [embeddings] in Cohere embeddings response") + ); + } + + public void testFromResponse_FailsWhenEmbeddingsByteValue_IsOutsideByteRange_Negative() { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "int8": [ + [ + -129, + 127 + ] + ] + }, + "meta": { + 
"api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + var thrownException = expectThrows( + IllegalArgumentException.class, + () -> CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + MatcherAssert.assertThat(thrownException.getMessage(), is("Value [-129] is out of range for a byte")); + } + + public void testFromResponse_FailsWhenEmbeddingsByteValue_IsOutsideByteRange_Positive() { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "int8": [ + [ + -128, + 128 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + var thrownException = expectThrows( + IllegalArgumentException.class, + () -> CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + MatcherAssert.assertThat(thrownException.getMessage(), is("Value [128] is out of range for a byte")); + } + + public void testFromResponse_FailsToFindAValidEmbeddingType() { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "invalid_type": [ + [ + -0.0018434525, + 0.01777649 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + 
is("Failed to find a supported embedding type in the Cohere embeddings response. Supported types are [float, int8]") + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereErrorResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereErrorResponseEntityTests.java new file mode 100644 index 0000000000000..a2b1c26b2b3d5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereErrorResponseEntityTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.cohere; + +import org.apache.http.HttpResponse; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.hamcrest.MatcherAssert; + +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class CohereErrorResponseEntityTests extends ESTestCase { + public void testFromResponse() { + String responseJson = """ + { + "message": "invalid request: total number of texts must be at most 96 - received 97" + } + """; + + CohereErrorResponseEntity errorMessage = CohereErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNotNull(errorMessage); + MatcherAssert.assertThat( + errorMessage.getErrorMessage(), + is("invalid request: total number of texts must be at most 96 - received 97") + ); + } + + public void testFromResponse_noMessage() { + String responseJson = """ + { + "error": "abc" + } + """; + + 
CohereErrorResponseEntity errorMessage = CohereErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNull(errorMessage); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java index 0f37ac87fe45a..b672260d36f7c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java @@ -41,14 +41,17 @@ public static TestModel createRandomInstance() { } public TestModel( - String modelId, + String inferenceEntityId, TaskType taskType, String service, TestServiceSettings serviceSettings, TestTaskSettings taskSettings, TestSecretSettings secretSettings ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secretSettings) + ); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index a8ea237ba8b0c..2417148c84ac2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -153,7 +153,7 @@ public void testGetModelWithSecrets() { registry.getModelWithSecrets("1", listener); var modelConfig = listener.actionGet(TIMEOUT); - assertEquals("1", modelConfig.modelId()); + assertEquals("1", modelConfig.inferenceEntityId()); assertEquals("foo", modelConfig.service()); 
assertEquals(TaskType.SPARSE_EMBEDDING, modelConfig.taskType()); assertThat(modelConfig.settings().keySet(), empty()); @@ -183,7 +183,7 @@ public void testGetModelNoSecrets() { registry.getModel("1", listener); var modelConfig = listener.actionGet(TIMEOUT); - assertEquals("1", modelConfig.modelId()); + assertEquals("1", modelConfig.inferenceEntityId()); assertEquals("foo", modelConfig.service()); assertEquals(TaskType.SPARSE_EMBEDDING, modelConfig.taskType()); assertThat(modelConfig.settings().keySet(), empty()); @@ -229,7 +229,7 @@ public void testStoreModel_ThrowsException_WhenBulkResponseIsEmpty() { is( format( "Failed to store inference model [%s], invalid bulk response received. Try reinitializing the service", - model.getConfigurations().getModelId() + model.getConfigurations().getInferenceEntityId() ) ) ); @@ -255,7 +255,10 @@ public void testStoreModel_ThrowsResourceAlreadyExistsException_WhenFailureIsAVe registry.storeModel(model, listener); ResourceAlreadyExistsException exception = expectThrows(ResourceAlreadyExistsException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(exception.getMessage(), is(format("Inference model [%s] already exists", model.getConfigurations().getModelId()))); + assertThat( + exception.getMessage(), + is(format("Inference model [%s] already exists", model.getConfigurations().getInferenceEntityId())) + ); } public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { @@ -278,7 +281,10 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { registry.storeModel(model, listener); ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(exception.getMessage(), is(format("Failed to store inference model [%s]", model.getConfigurations().getModelId()))); + assertThat( + exception.getMessage(), + is(format("Failed to store inference model [%s]", model.getConfigurations().getInferenceEntityId())) + ); } 
private Client mockBulkClient() { @@ -299,7 +305,7 @@ private static void mockClientExecuteSearch(Client client, SearchResponse search doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[2]; - actionListener.onResponse(searchResponse); + ActionListener.respondAndRelease(actionListener, searchResponse); return Void.TYPE; }).when(client).execute(any(), any(), any()); } @@ -314,10 +320,13 @@ private static void mockClientExecuteBulk(Client client, BulkResponse bulkRespon } private static SearchResponse mockSearchResponse(SearchHit[] hits) { - SearchHits searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 1); - var searchResponse = mock(SearchResponse.class); - when(searchResponse.getHits()).thenReturn(searchHits); + SearchHits searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 1); + try { + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + } finally { + searchHits.decRef(); + } return searchResponse; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java new file mode 100644 index 0000000000000..b9318db6ece34 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class TextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { + public static TextEmbeddingByteResults createRandomResults() { + int embeddings = randomIntBetween(1, 10); + List embeddingResults = new ArrayList<>(embeddings); + + for (int i = 0; i < embeddings; i++) { + embeddingResults.add(createRandomEmbedding()); + } + + return new TextEmbeddingByteResults(embeddingResults); + } + + private static TextEmbeddingByteResults.Embedding createRandomEmbedding() { + int columns = randomIntBetween(1, 10); + List floats = new ArrayList<>(columns); + + for (int i = 0; i < columns; i++) { + floats.add(randomByte()); + } + + return new TextEmbeddingByteResults.Embedding(floats); + } + + public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOException { + var entity = new TextEmbeddingByteResults(List.of(new TextEmbeddingByteResults.Embedding(List.of((byte) 23)))); + + assertThat( + entity.asMap(), + is( + Map.of( + TextEmbeddingByteResults.TEXT_EMBEDDING, + List.of(Map.of(TextEmbeddingByteResults.Embedding.EMBEDDING, List.of((byte) 23))) + ) + ) + ); + + String xContentResult = Strings.toString(entity, true, true); + assertThat(xContentResult, is(""" + { + "text_embedding" : [ + { + "embedding" : [ + 23 + ] + } + ] + }""")); + } + + public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws IOException { + var entity = new TextEmbeddingByteResults( + List.of(new TextEmbeddingByteResults.Embedding(List.of((byte) 23)), new 
TextEmbeddingByteResults.Embedding(List.of((byte) 24))) + + ); + + assertThat( + entity.asMap(), + is( + Map.of( + TextEmbeddingByteResults.TEXT_EMBEDDING, + List.of( + Map.of(TextEmbeddingByteResults.Embedding.EMBEDDING, List.of((byte) 23)), + Map.of(TextEmbeddingByteResults.Embedding.EMBEDDING, List.of((byte) 24)) + ) + ) + ) + ); + + String xContentResult = Strings.toString(entity, true, true); + assertThat(xContentResult, is(""" + { + "text_embedding" : [ + { + "embedding" : [ + 23 + ] + }, + { + "embedding" : [ + 24 + ] + } + ] + }""")); + } + + public void testTransformToCoordinationFormat() { + var results = new TextEmbeddingByteResults( + List.of( + new TextEmbeddingByteResults.Embedding(List.of((byte) 23, (byte) 24)), + new TextEmbeddingByteResults.Embedding(List.of((byte) 25, (byte) 26)) + ) + ).transformToCoordinationFormat(); + + assertThat( + results, + is( + List.of( + new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( + TextEmbeddingByteResults.TEXT_EMBEDDING, + new double[] { 23F, 24F }, + false + ), + new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( + TextEmbeddingByteResults.TEXT_EMBEDDING, + new double[] { 25F, 26F }, + false + ) + ) + ) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return TextEmbeddingByteResults::new; + } + + @Override + protected TextEmbeddingByteResults createTestInstance() { + return createRandomResults(); + } + + @Override + protected TextEmbeddingByteResults mutateInstance(TextEmbeddingByteResults instance) throws IOException { + // if true we reduce the embeddings list by a random amount, if false we add an embedding to the list + if (randomBoolean()) { + // -1 to remove at least one item from the list + int end = randomInt(instance.embeddings().size() - 1); + return new TextEmbeddingByteResults(instance.embeddings().subList(0, end)); + } else { + List embeddings = new ArrayList<>(instance.embeddings()); + 
embeddings.add(createRandomEmbedding()); + return new TextEmbeddingByteResults(embeddings); + } + } + + public static Map buildExpectation(List> embeddings) { + return Map.of( + TextEmbeddingByteResults.TEXT_EMBEDDING, + embeddings.stream().map(embedding -> Map.of(TextEmbeddingByteResults.Embedding.EMBEDDING, embedding)).toList() + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index bae2e7e9b68c9..31d7667fa6665 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -116,13 +116,18 @@ public String name() { } @Override - public Model parseRequestConfig(String modelId, TaskType taskType, Map config, Set platfromArchitectures) { + public Model parseRequestConfig( + String inferenceEntityId, + TaskType taskType, + Map config, + Set platfromArchitectures + ) { return null; } @Override public Model parsePersistedConfigWithSecrets( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Map secrets @@ -131,7 +136,7 @@ public Model parsePersistedConfigWithSecrets( } @Override - public Model parsePersistedConfig(String modelId, TaskType taskType, Map config) { + public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { return null; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index eb54745806a68..b935c5a8c64b3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -8,24 +8,45 @@ package org.elasticsearch.xpack.inference.services; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.results.TextEmbeddingByteResultsTests; +import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.getEmbeddingSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static 
org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class ServiceUtilsTests extends ESTestCase { + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + public void testRemoveAsTypeWithTheCorrectType() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 1.0)); @@ -105,10 +126,11 @@ public void testConvertToUri_CreatesUri() { assertThat(uri.toString(), is("www.elastic.co")); } - public void testConvertToUri_ThrowsNullPointerException_WhenPassedNull() { + public void testConvertToUri_DoesNotThrowNullPointerException_WhenPassedNull() { var validation = new ValidationException(); - expectThrows(NullPointerException.class, () -> convertToUri(null, "name", "scope", validation)); + var uri = convertToUri(null, "name", "scope", validation); + assertNull(uri); assertTrue(validation.validationErrors().isEmpty()); } @@ -236,6 +258,124 @@ public void testExtractOptionalString_AddsException_WhenFieldIsEmpty() { assertThat(validation.validationErrors().get(0), is("[scope] Invalid value empty string. 
[key] must be a non-empty string")); } + public void testExtractOptionalEnum_ReturnsNull_WhenFieldDoesNotExist() { + var validation = new ValidationException(); + Map map = modifiableMap(Map.of("key", "value")); + var createdEnum = extractOptionalEnum(map, "abc", "scope", InputType::fromString, InputType.values(), validation); + + assertNull(createdEnum); + assertTrue(validation.validationErrors().isEmpty()); + assertThat(map.size(), is(1)); + } + + public void testExtractOptionalEnum_ReturnsNullAndAddsException_WhenAnInvalidValueExists() { + var validation = new ValidationException(); + Map map = modifiableMap(Map.of("key", "invalid_value")); + var createdEnum = extractOptionalEnum(map, "key", "scope", InputType::fromString, InputType.values(), validation); + + assertNull(createdEnum); + assertFalse(validation.validationErrors().isEmpty()); + assertTrue(map.isEmpty()); + assertThat( + validation.validationErrors().get(0), + is("[scope] Invalid value [invalid_value] received. [key] must be one of [ingest, search]") + ); + } + + public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingResults_IsEmpty() { + var service = mock(InferenceService.class); + + var model = mock(Model.class); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[3]; + listener.onResponse(new TextEmbeddingResults(List.of())); + + return Void.TYPE; + }).when(service).infer(any(), any(), any(), any()); + + PlainActionFuture listener = new PlainActionFuture<>(); + getEmbeddingSize(model, service, listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("Could not determine embedding size")); + assertThat(thrownException.getCause().getMessage(), is("Embeddings list is empty")); + } + + public void 
testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingByteResults_IsEmpty() { + var service = mock(InferenceService.class); + + var model = mock(Model.class); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[3]; + listener.onResponse(new TextEmbeddingByteResults(List.of())); + + return Void.TYPE; + }).when(service).infer(any(), any(), any(), any()); + + PlainActionFuture listener = new PlainActionFuture<>(); + getEmbeddingSize(model, service, listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("Could not determine embedding size")); + assertThat(thrownException.getCause().getMessage(), is("Embeddings list is empty")); + } + + public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingResults() { + var service = mock(InferenceService.class); + + var model = mock(Model.class); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var textEmbedding = TextEmbeddingResultsTests.createRandomResults(); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[3]; + listener.onResponse(textEmbedding); + + return Void.TYPE; + }).when(service).infer(any(), any(), any(), any()); + + PlainActionFuture listener = new PlainActionFuture<>(); + getEmbeddingSize(model, service, listener); + + var size = listener.actionGet(TIMEOUT); + + assertThat(size, is(textEmbedding.embeddings().get(0).values().size())); + } + + public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingByteResults() { + var service = mock(InferenceService.class); + + var model = mock(Model.class); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var textEmbedding = TextEmbeddingByteResultsTests.createRandomResults(); + + doAnswer(invocation 
-> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[3]; + listener.onResponse(textEmbedding); + + return Void.TYPE; + }).when(service).infer(any(), any(), any(), any()); + + PlainActionFuture listener = new PlainActionFuture<>(); + getEmbeddingSize(model, service, listener); + + var size = listener.actionGet(TIMEOUT); + + assertThat(size, is(textEmbedding.embeddings().get(0).values().size())); + } + private static Map modifiableMap(Map aMap) { return new HashMap<>(aMap); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java index 59abda79abad0..bfb019d2f8f59 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java @@ -14,9 +14,9 @@ import static org.mockito.Mockito.when; public class Utils { - public static Model getInvalidModel(String modelId, String serviceName) { + public static Model getInvalidModel(String inferenceEntityId, String serviceName) { var mockConfigs = mock(ModelConfigurations.class); - when(mockConfigs.getModelId()).thenReturn(modelId); + when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); when(mockConfigs.getService()).thenReturn(serviceName); var mockModel = mock(Model.class); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java new file mode 100644 index 0000000000000..6f47d5c74d81c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class CohereServiceSettingsTests extends AbstractWireSerializingTestCase { + + public static CohereServiceSettings createRandomWithNonNullUrl() { + return createRandom(randomAlphaOfLength(15)); + } + + /** + * The created settings can have a url set to null. + */ + public static CohereServiceSettings createRandom() { + var url = randomBoolean() ? randomAlphaOfLength(15) : null; + return createRandom(url); + } + + private static CohereServiceSettings createRandom(String url) { + SimilarityMeasure similarityMeasure = null; + Integer dims = null; + var isTextEmbeddingModel = randomBoolean(); + if (isTextEmbeddingModel) { + similarityMeasure = SimilarityMeasure.DOT_PRODUCT; + dims = 1536; + } + Integer maxInputTokens = randomBoolean() ? null : randomIntBetween(128, 256); + var model = randomBoolean() ? 
randomAlphaOfLength(15) : null; + + return new CohereServiceSettings(ServiceUtils.createOptionalUri(url), similarityMeasure, dims, maxInputTokens, model); + } + + public void testFromMap() { + var url = "https://www.abc.com"; + var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; + var model = "model"; + var serviceSettings = CohereServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens, + CohereServiceSettings.MODEL, + model + ) + ) + ); + + MatcherAssert.assertThat( + serviceSettings, + is(new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model)) + ); + } + + public void testFromMap_MissingUrl_DoesNotThrowException() { + var serviceSettings = CohereServiceSettings.fromMap(new HashMap<>(Map.of())); + assertNull(serviceSettings.getUri()); + } + + public void testFromMap_EmptyUrl_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, ""))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. 
[%s] must be a non-empty string;", + ServiceFields.URL + ) + ) + ); + } + + public void testFromMap_InvalidUrl_ThrowsError() { + var url = "https://www.abc^.com"; + var thrownException = expectThrows( + ValidationException.class, + () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", url, ServiceFields.URL)) + ); + } + + public void testFromMap_InvalidSimilarity_ThrowsError() { + var similarity = "by_size"; + var thrownException = expectThrows( + ValidationException.class, + () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.SIMILARITY, similarity))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];") + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return CohereServiceSettings::new; + } + + @Override + protected CohereServiceSettings createTestInstance() { + return createRandomWithNonNullUrl(); + } + + @Override + protected CohereServiceSettings mutateInstance(CohereServiceSettings instance) throws IOException { + return null; + } + + public static Map getServiceSettingsMap(@Nullable String url, @Nullable String model) { + var map = new HashMap(); + + if (url != null) { + map.put(ServiceFields.URL, url); + } + + if (model != null) { + map.put(CohereServiceSettings.MODEL, model); + } + + return map; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java new file mode 100644 index 0000000000000..0250e08a48452 --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -0,0 +1,895 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere; + +import org.apache.http.HttpHeaders; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModelTests; +import 
org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; +import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMapEmpty; +import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + 
+public class CohereServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var model = service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.FLOAT), + getTaskSettingsMap(InputType.INGEST, CohereTruncation.START), + getSecretSettingsMap("secret") + ), + Set.of() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); + MatcherAssert.assertThat( + embeddingsModel.getTaskSettings(), + is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START)) + ); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void 
testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig( + "id", + TaskType.SPARSE_EMBEDDING, + getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty(), + getSecretSettingsMap("secret") + ), + Set.of() + ) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("The [cohere] service does not support task type [sparse_embedding]") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var config = getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty(), + getSecretSettingsMap("secret") + ); + config.put("extra_key", "value"); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettings = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null); + serviceSettings.put("extra_key", "value"); + + var config = 
getRequestConfigMap(serviceSettings, getTaskSettingsMap(null, null), getSecretSettingsMap("secret")); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = getTaskSettingsMap(InputType.INGEST, null); + taskSettingsMap.put("extra_key", "value"); + + var config = getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + taskSettingsMap, + getSecretSettingsMap("secret") + ); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var secretSettingsMap = getSecretSettingsMap("secret"); + secretSettingsMap.put("extra_key", "value"); + + var config = getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty(), + secretSettingsMap + ); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> 
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") + ); + } + } + + public void testParseRequestConfig_CreatesACohereEmbeddingsModelWithoutUrl() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var model = service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, null, null), + getTaskSettingsMapEmpty(), + getSecretSettingsMap("secret") + ), + Set.of() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModel() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, null), + getSecretSettingsMap("secret") + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + 
MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty(), + getSecretSettingsMap("secret") + ); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parsePersistedConfigWithSecrets( + "id", + TaskType.SPARSE_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Failed to parse stored model [id] for [cohere] service, please delete and add the service again") + ); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWithoutUrl() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, null, null), + getTaskSettingsMap(InputType.INGEST, null), + getSecretSettingsMap("secret") + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, 
instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null))); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.INT8), + getTaskSettingsMap(InputType.SEARCH, CohereTruncation.NONE), + getSecretSettingsMap("secret") + ); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.INT8)); + MatcherAssert.assertThat( + embeddingsModel.getTaskSettings(), + is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE)) + ); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws 
IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var secretSettingsMap = getSecretSettingsMap("secret"); + secretSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty(), + secretSettingsMap + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSecrets() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, null), + getSecretSettingsMap("secret") + ); + persistedConfig.secrets.put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + 
MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null); + serviceSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getTaskSettingsMapEmpty(), getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = getTaskSettingsMap(InputType.SEARCH, null); + 
taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + taskSettingsMap, + getSecretSettingsMap("secret") + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null))); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfig_CreatesACohereEmbeddingsModel() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, CohereTruncation.NONE) + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, 
CohereTruncation.NONE))); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty() + ); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config()) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is("Failed to parse stored model [id] for [cohere] service, please delete and add the service again") + ); + } + } + + public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWithoutUrl() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, "model", CohereEmbeddingType.FLOAT), + getTaskSettingsMap(null, null) + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); + 
assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty() + ); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null); + serviceSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getTaskSettingsMap(InputType.SEARCH, null)); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + 
MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null))); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new CohereService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = getTaskSettingsMap(InputType.INGEST, null); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + taskSettingsMap + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null))); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSenderFactory.class); + when(factory.createSender(anyString())).thenReturn(sender); + + var mockModel = getInvalidModel("model_id", "service_name"); + + try (var service = new CohereService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(mockModel, List.of(""), new HashMap<>(), listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> 
listener.actionGet(TIMEOUT)); + MatcherAssert.assertThat( + thrownException.getMessage(), + is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") + ); + + verify(factory, times(1)).createSender(anyString()); + verify(sender, times(1)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + public void testInfer_SendsRequest() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new CohereService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(InputType.INGEST, null), + 1024, + 1024, + "model", + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), listener); + + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat(result.asMap(), Matchers.is(buildExpectation(List.of(List.of(0.123F, -0.123F))))); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaType()) + ); + MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer 
secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + } + } + + public void testCheckModelConfig_UpdatesDimensions() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new CohereService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 1, + null, + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat( + result, + // the dimension is set to 2 because there are 2 embeddings returned from the mock server + is( + CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 2, + null, + null + ) + ) + ); + } + } + + public void testInfer_UnauthorisedResponse() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new CohereService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "message": "invalid api token" + } + """; + webServer.enqueue(new 
MockResponse().setResponseCode(401).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 1024, + 1024, + null, + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), listener); + + var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + MatcherAssert.assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); + MatcherAssert.assertThat(error.getMessage(), containsString("Error message: [invalid api token]")); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + } + } + + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map secretSettings + ) { + var builtServiceSettings = new HashMap<>(); + builtServiceSettings.putAll(serviceSettings); + builtServiceSettings.putAll(secretSettings); + + return new HashMap<>( + Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings) + ); + } + + private PeristedConfig getPersistedConfigMap( + Map serviceSettings, + Map taskSettings, + Map secretSettings + ) { + + return new PeristedConfig( + new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), + new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) + ); + } + + private PeristedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { + + return new PeristedConfig( + new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), + null + ); + } + + private record PeristedConfig(Map config, Map secrets) {} +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java new file mode 100644 index 0000000000000..1961d6b168d54 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import org.hamcrest.MatcherAssert; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class CohereEmbeddingsModelTests extends ESTestCase { + + public void testOverrideWith_OverridesInputType_WithSearch() { + var model = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.INGEST, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + + var overriddenModel = model.overrideWith(getTaskSettingsMap(InputType.SEARCH, null)); + var expectedModel = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.SEARCH, null), + null, + null, + "model", + 
CohereEmbeddingType.FLOAT + ); + MatcherAssert.assertThat(overriddenModel, is(expectedModel)); + } + + public void testOverrideWith_DoesNotOverride_WhenSettingsAreEmpty() { + var model = createModel("url", "api_key", null, null, null); + + var overriddenModel = model.overrideWith(Map.of()); + MatcherAssert.assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_DoesNotOverride_WhenSettingsAreNull() { + var model = createModel("url", "api_key", null, null, null); + + var overriddenModel = model.overrideWith(null); + MatcherAssert.assertThat(overriddenModel, sameInstance(model)); + } + + public static CohereEmbeddingsModel createModel( + String url, + String apiKey, + @Nullable Integer tokenLimit, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType + ) { + return createModel(url, apiKey, CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, tokenLimit, null, model, embeddingType); + } + + public static CohereEmbeddingsModel createModel( + String url, + String apiKey, + @Nullable Integer tokenLimit, + @Nullable Integer dimensions, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType + ) { + return createModel(url, apiKey, CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, tokenLimit, dimensions, model, embeddingType); + } + + public static CohereEmbeddingsModel createModel( + String url, + String apiKey, + CohereEmbeddingsTaskSettings taskSettings, + @Nullable Integer tokenLimit, + @Nullable Integer dimensions, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType + ) { + return new CohereEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings(url, SimilarityMeasure.DOT_PRODUCT, dimensions, tokenLimit, model), + embeddingType + ), + taskSettings, + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java new file mode 100644 index 0000000000000..e0b29ce9c34da --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettingsTests; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static 
org.hamcrest.Matchers.is; + +public class CohereEmbeddingsServiceSettingsTests extends AbstractWireSerializingTestCase { + public static CohereEmbeddingsServiceSettings createRandom() { + var commonSettings = CohereServiceSettingsTests.createRandom(); + var embeddingType = randomBoolean() ? randomFrom(CohereEmbeddingType.values()) : null; + + return new CohereEmbeddingsServiceSettings(commonSettings, embeddingType); + } + + public void testFromMap() { + var url = "https://www.abc.com"; + var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; + var model = "model"; + var serviceSettings = CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens, + CohereServiceSettings.MODEL, + model, + CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, + CohereEmbeddingType.INT8.toString() + ) + ) + ); + + MatcherAssert.assertThat( + serviceSettings, + is( + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model), + CohereEmbeddingType.INT8 + ) + ) + ); + } + + public void testFromMap_MissingEmbeddingType_DoesNotThrowException() { + var serviceSettings = CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of())); + assertNull(serviceSettings.getEmbeddingType()); + } + + public void testFromMap_EmptyEmbeddingType_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, ""))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. 
[%s] must be a non-empty string;", + CohereEmbeddingsServiceSettings.EMBEDDING_TYPE + ) + ) + ); + } + + public void testFromMap_InvalidEmbeddingType_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "abc"))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value [abc] received. [embedding_type] must be one of [float, int8];" + ) + ) + ); + } + + public void testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { + var exception = expectThrows( + ElasticsearchStatusException.class, + () -> CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, List.of("abc"))) + ) + ); + + MatcherAssert.assertThat( + exception.getMessage(), + is("field [embedding_type] is not of the expected type. The value [[abc]] cannot be converted to a [String]") + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return CohereEmbeddingsServiceSettings::new; + } + + @Override + protected CohereEmbeddingsServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected CohereEmbeddingsServiceSettings mutateInstance(CohereEmbeddingsServiceSettings instance) throws IOException { + return null; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + List entries = new ArrayList<>(); + entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); + entries.addAll(InferenceNamedWriteablesProvider.getNamedWriteables()); + return new NamedWriteableRegistry(entries); + } + + public static Map getServiceSettingsMap( + @Nullable String url, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType + ) { + var map = new HashMap<>(CohereServiceSettingsTests.getServiceSettingsMap(url, model)); + + if 
(embeddingType != null) { + map.put(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, embeddingType.toString()); + } + + return map; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java new file mode 100644 index 0000000000000..164d3998f138f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class CohereEmbeddingsTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static CohereEmbeddingsTaskSettings createRandom() { + var inputType = randomBoolean() ? randomFrom(InputType.values()) : null; + var truncation = randomBoolean() ? 
randomFrom(CohereTruncation.values()) : null; + + return new CohereEmbeddingsTaskSettings(inputType, truncation); + } + + public void testFromMap_CreatesEmptySettings_WhenAllFieldsAreNull() { + MatcherAssert.assertThat( + CohereEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of())), + is(new CohereEmbeddingsTaskSettings(null, null)) + ); + } + + public void testFromMap_CreatesSettings_WhenAllFieldsOfSettingsArePresent() { + MatcherAssert.assertThat( + CohereEmbeddingsTaskSettings.fromMap( + new HashMap<>( + Map.of( + CohereEmbeddingsTaskSettings.INPUT_TYPE, + InputType.INGEST.toString(), + CohereServiceFields.TRUNCATE, + CohereTruncation.END.toString() + ) + ) + ), + is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.END)) + ); + } + + public void testFromMap_ReturnsFailure_WhenInputTypeIsInvalid() { + var exception = expectThrows( + ValidationException.class, + () -> CohereEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(CohereEmbeddingsTaskSettings.INPUT_TYPE, "abc"))) + ); + + MatcherAssert.assertThat( + exception.getMessage(), + is("Validation Failed: 1: [task_settings] Invalid value [abc] received. 
[input_type] must be one of [ingest, search];") + ); + } + + public void testOverrideWith_KeepsOriginalValuesWhenOverridesAreNull() { + var taskSettings = CohereEmbeddingsTaskSettings.fromMap( + new HashMap<>(Map.of(CohereServiceSettings.MODEL, "model", CohereServiceFields.TRUNCATE, CohereTruncation.END.toString())) + ); + + var overriddenTaskSettings = taskSettings.overrideWith(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS); + MatcherAssert.assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var taskSettings = CohereEmbeddingsTaskSettings.fromMap( + new HashMap<>(Map.of(CohereServiceFields.TRUNCATE, CohereTruncation.END.toString())) + ); + + var requestTaskSettings = CohereEmbeddingsTaskSettings.fromMap( + new HashMap<>(Map.of(CohereServiceFields.TRUNCATE, CohereTruncation.START.toString())) + ); + + var overriddenTaskSettings = taskSettings.overrideWith(requestTaskSettings); + MatcherAssert.assertThat(overriddenTaskSettings, is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.START))); + } + + @Override + protected Writeable.Reader instanceReader() { + return CohereEmbeddingsTaskSettings::new; + } + + @Override + protected CohereEmbeddingsTaskSettings createTestInstance() { + return createRandom(); + } + + @Override + protected CohereEmbeddingsTaskSettings mutateInstance(CohereEmbeddingsTaskSettings instance) throws IOException { + return null; + } + + public static Map getTaskSettingsMapEmpty() { + return new HashMap<>(); + } + + public static Map getTaskSettingsMap(@Nullable InputType inputType, @Nullable CohereTruncation truncation) { + var map = new HashMap(); + + if (inputType != null) { + map.put(CohereEmbeddingsTaskSettings.INPUT_TYPE, inputType.toString()); + } + + if (truncation != null) { + map.put(CohereServiceFields.TRUNCATE, truncation.toString()); + } + + return map; + } +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java index 8b6f3f1a56ba6..c6d8d852a47c2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java @@ -93,15 +93,9 @@ public void testTransportVersionIsCompatibleWithElserModelVersion() { TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED ) ); - assertTrue( - ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion( - TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED_PATCH - ) - ); + assertTrue(ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion(TransportVersions.V_8_11_X)); - assertFalse( - ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion(TransportVersions.ML_PACKAGE_LOADER_PLATFORM_ADDED) - ); + assertFalse(ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion(TransportVersions.V_8_10_X)); assertFalse( ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion( TransportVersions.PLUGIN_DESCRIPTOR_OPTIONAL_CLASSNAME @@ -128,7 +122,7 @@ public void testBwcWrite() throws IOException { } { var settings = new ElserMlNodeServiceSettings(1, 1, ".elser_model_1"); - var copy = copyInstance(settings, TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED_PATCH); + var copy = copyInstance(settings, TransportVersions.V_8_11_X); assertEquals(settings, copy); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java index 
f8480709a3e40..6e74241cc754c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java @@ -25,10 +25,10 @@ public class ElserMlNodeServiceTests extends ESTestCase { - public static Model randomModelConfig(String modelId, TaskType taskType) { + public static Model randomModelConfig(String inferenceEntityId, TaskType taskType) { return switch (taskType) { case SPARSE_EMBEDDING -> new ElserMlNodeModel( - modelId, + inferenceEntityId, taskType, ElserMlNodeService.NAME, ElserMlNodeServiceSettingsTests.createRandom(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index b82812d6c393a..e9fb835016b4f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -99,7 +99,7 @@ public TransportVersion getMinimalSupportedVersion() { @Override protected HuggingFaceModel createModel( - String modelId, + String inferenceEntityId, TaskType taskType, Map serviceSettings, Map secretSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java index 89ad9fd5543df..33dbee2a32b9f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java @@ -29,4 +29,14 @@ public static HuggingFaceElserModel createModel(String url, String apiKey) { new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())) ); } + + public static HuggingFaceElserModel createModel(String url, String apiKey, String modelId) { + return new HuggingFaceElserModel( + modelId, + TaskType.SPARSE_EMBEDDING, + "service", + new HuggingFaceElserServiceSettings(url), + new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java index d33ec12016cad..f297eb622c421 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.hamcrest.MatcherAssert; import java.io.IOException; import java.util.HashMap; @@ -39,7 +40,7 @@ public void testFromMap_MissingModel_ThrowException() { () -> OpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user"))) ); - assertThat( + MatcherAssert.assertThat( thrownException.getMessage(), is( Strings.format( @@ -55,14 +56,14 @@ public void testFromMap_CreatesWithModelAndUser() { new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model", OpenAiEmbeddingsTaskSettings.USER, "user")) ); - 
assertThat(taskSettings.model(), is("model")); - assertThat(taskSettings.user(), is("user")); + MatcherAssert.assertThat(taskSettings.model(), is("model")); + MatcherAssert.assertThat(taskSettings.user(), is("user")); } public void testFromMap_MissingUser_DoesNotThrowException() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model"))); - assertThat(taskSettings.model(), is("model")); + MatcherAssert.assertThat(taskSettings.model(), is("model")); assertNull(taskSettings.user()); } @@ -72,7 +73,7 @@ public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { ); var overriddenTaskSettings = taskSettings.overrideWith(OpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS); - assertThat(overriddenTaskSettings, is(taskSettings)); + MatcherAssert.assertThat(overriddenTaskSettings, is(taskSettings)); } public void testOverrideWith_UsesOverriddenSettings() { @@ -85,7 +86,7 @@ public void testOverrideWith_UsesOverriddenSettings() { ); var overriddenTaskSettings = taskSettings.overrideWith(requestTaskSettings); - assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user2"))); + MatcherAssert.assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user2"))); } public void testOverrideWith_UsesOnlyNonNullModelSetting() { @@ -98,7 +99,7 @@ public void testOverrideWith_UsesOnlyNonNullModelSetting() { ); var overriddenTaskSettings = taskSettings.overrideWith(requestTaskSettings); - assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user"))); + MatcherAssert.assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user"))); } @Override diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java index 204bebcff4499..b6215b4efe5ba 100644 --- 
a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -78,6 +79,7 @@ public Logstash() {} @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/DeletePipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/DeletePipelineAction.java index 4dc263c98f079..15baf2026e087 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/DeletePipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/DeletePipelineAction.java @@ -15,6 +15,6 @@ public class DeletePipelineAction extends ActionType { public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); private DeletePipelineAction() { - super(NAME, DeletePipelineResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/GetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/GetPipelineAction.java index 33ed6646597fc..5c3ac908a8f59 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/GetPipelineAction.java +++ 
b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/GetPipelineAction.java @@ -15,6 +15,6 @@ public class GetPipelineAction extends ActionType { public static final GetPipelineAction INSTANCE = new GetPipelineAction(); private GetPipelineAction() { - super(NAME, GetPipelineResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/PutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/PutPipelineAction.java index 9067321d24d48..f57adde81ab78 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/PutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/PutPipelineAction.java @@ -15,6 +15,6 @@ public class PutPipelineAction extends ActionType { public static final PutPipelineAction INSTANCE = new PutPipelineAction(); private PutPipelineAction() { - super(NAME, PutPipelineResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java index f75dd2926059a..f595153e4d6dd 100644 --- a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java +++ b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java @@ -222,18 +222,18 @@ protected void } private SearchHits prepareSearchHits() { - SearchHit hit1 = new SearchHit(0, "1"); + SearchHit hit1 = SearchHit.unpooled(0, "1"); hit1.score(1f); hit1.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); - SearchHit hit2 = new SearchHit(0, "2"); + SearchHit hit2 = SearchHit.unpooled(0, "2"); hit2.score(1f); hit2.shard(new SearchShardTarget("a", new ShardId("a", 
"indexUUID", 0), null)); - SearchHit hit3 = new SearchHit(0, "3*"); + SearchHit hit3 = SearchHit.unpooled(0, "3*"); hit3.score(1f); hit3.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); - return new SearchHits(new SearchHit[] { hit1, hit2, hit3 }, new TotalHits(3L, TotalHits.Relation.EQUAL_TO), 1f); + return SearchHits.unpooled(new SearchHit[] { hit1, hit2, hit3 }, new TotalHits(3L, TotalHits.Relation.EQUAL_TO), 1f); } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index e2cf8c3014604..1152d93f66b38 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -240,6 +240,11 @@ public String indexName() { throw new UnsupportedOperationException(); } + @Override + public MappedFieldType.FieldExtractPreference fieldExtractPreference() { + return MappedFieldType.FieldExtractPreference.NONE; + } + @Override public SearchLookup lookup() { throw new UnsupportedOperationException(); @@ -309,7 +314,7 @@ protected IngestScriptSupport ingestScriptSupport() { } @Override - protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) { + protected Function loadBlockExpected() { return v -> ((BytesRef) v).utf8ToString(); } diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java index cd8fdf7f89fbd..95fe8f0a530ba 100644 --- 
a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java @@ -367,7 +367,7 @@ protected IngestScriptSupport ingestScriptSupport() { } @Override - protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) { + protected Function loadBlockExpected() { return v -> { // Numbers are in the block as a long but the test needs to compare them to their BigInteger value parsed from xcontent. if (v instanceof BigInteger ul) { diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java index 376263d5cfc99..5653ed7f4302f 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -189,7 +188,7 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) } @Override - protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) { + protected Function loadBlockExpected() { return v -> new Version((BytesRef) v).toString(); } diff --git 
a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java index 2827255874224..ead7c836463fd 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java @@ -106,6 +106,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A } catch (Exception e) { taskManager.unregister(downloadTask); listener.onFailure(e); + return; } if (request.isWaitForCompletion() == false) { diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index 9d931974d25d5..d102490820a07 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -50,29 +50,16 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' - maybeDisableForFips(it) } tasks.register('mixed-cluster', RestIntegTestTask) { dependsOn 'remote-cluster' useCluster remoteCluster systemProperty 'tests.rest.suite', 'multi_cluster' - maybeDisableForFips(it) } tasks.register("integTest") { dependsOn 'mixed-cluster' - maybeDisableForFips(it) } tasks.named("check").configure { dependsOn("integTest") } - -//TODO: remove with version 8.14. A new FIPS setting was added in 8.13. Since FIPS configures all test clusters and this specific integTest uses -// the previous minor version, that setting is not available when running in FIPS until 8.14. 
-def maybeDisableForFips(task) { - if (BuildParams.inFipsJvm) { - if(Version.fromString(project.version).before(Version.fromString('8.14.0'))) { - task.enabled = false - } - } -} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index db53b9aec7f1f..35c2ba0e68080 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -14,7 +14,6 @@ dependencies { javaRestTestImplementation project(path: xpackModule('slm')) javaRestTestImplementation project(path: xpackModule('monitoring')) javaRestTestImplementation project(path: xpackModule('transform')) - javaRestTestImplementation project(":client:rest-high-level") } // location for keys and certificates diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java index 7b7073248b087..dbc92ddcc1e6b 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java @@ -1579,7 +1579,7 @@ public void testFeatureImportanceValues() throws Exception { TrainedModelDefinition modelDefinition = getModelDefinition(modelId); Ensemble ensemble = (Ensemble) modelDefinition.getTrainedModel(); int numberTrees = ensemble.getModels().size(); - String str = "Failure: failed for modelId %s numberTrees %d\n"; + String str = "Failure: failed for inferenceEntityId %s numberTrees %d\n"; for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); assertNotNull(destDoc); diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index d12672fd4afb0..1f76fe97144a8 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -794,6 +794,91 @@ public void testLookbackOnlyGivenAggregationsWithHistogram() throws Exception { assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0")); } + /** + * This test confirms the fix for the issue + * where a datafeed with aggregations that filter everything for a bucket can go into an infinite loop. + * In this test the filter in the aggregation is crazy as it literally filters everything. Real users would + * have a filter that only occasionally results in no results from the aggregation while the query alone + * returns data. But the code path that's exercised is the same. + */ + public void testLookbackOnlyGivenAggregationsWithHistogramAndBucketFilter() throws Exception { + String jobId = "aggs-histogram-filter-job"; + Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); + createJobRequest.setJsonEntity(""" + { + "description": "Aggs job with dodgy filter", + "analysis_config": { + "bucket_span": "1h", + "summary_count_field_name": "doc_count", + "detectors": [ + { + "function": "mean", + "field_name": "responsetime", + "by_field_name": "airline" + } + ] + }, + "data_description": {"time_field": "time stamp"} + }"""); + client().performRequest(createJobRequest); + + String datafeedId = "datafeed-" + jobId; + // The "filter_everything" aggregation in here means the output is always empty. 
+ String aggregations = """ + { + "buckets": { + "histogram": { + "field": "time stamp", + "interval": 3600000 + }, + "aggregations": { + "time stamp": { + "max": { + "field": "time stamp" + } + }, + "filter_everything" : { + "filter": { + "term" : { + "airline": "does not exist" + } + }, + "aggregations": { + "airline": { + "terms": { + "field": "airline", + "size": 10 + }, + "aggregations": { + "responsetime": { + "avg": { + "field": "responsetime" + } + } + } + } + } + } + } + } + }"""; + // The chunking timespan of 1 hour here must be less than the span of the data in order for the problem to be reproduced + new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs").setChunkingTimespan("1h").setAggregations(aggregations).build(); + openJob(client(), jobId); + + startDatafeedAndWaitUntilStopped(datafeedId); + waitUntilJobIsClosed(jobId); + Response jobStatsResponse = client().performRequest( + new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats") + ); + String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity()); + assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":0")); + assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":0")); + assertThat(jobStatsResponseAsString, containsString("\"bucket_count\":0")); + + // The most important thing this test is asserting is that we don't go into an infinite loop! 
+ } + public void testLookbackOnlyGivenAggregationsWithDateHistogram() throws Exception { String jobId = "aggs-date-histogram-job"; Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index 5f2f7cfe491ca..f9213a7fcaeb8 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -10,12 +10,12 @@ import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterModule; @@ -308,7 +308,7 @@ protected PutFilterAction.Response putMlFilter(MlFilter filter) { protected static List fetchAllAuditMessages(String jobId) throws Exception { RefreshRequest refreshRequest = new RefreshRequest(NotificationsIndex.NOTIFICATIONS_INDEX); - 
RefreshResponse refreshResponse = client().execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); + BroadcastResponse refreshResponse = client().execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); assertThat(refreshResponse.getStatus().getStatus(), anyOf(equalTo(200), equalTo(201))); SearchRequest searchRequest = new SearchRequestBuilder(client()).setIndices(NotificationsIndex.NOTIFICATIONS_INDEX) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index a5c47524b6934..f28f6eff25b04 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -51,7 +51,6 @@ import static org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; public class ModelSnapshotRetentionIT extends MlNativeAutodetectIntegTestCase { @@ -191,8 +190,7 @@ private List getAvailableModelStateDocIds() throws Exception { private List getDocIdsFromSearch(SearchRequest searchRequest) throws Exception { List docIds = new ArrayList<>(); assertResponse(client().execute(TransportSearchAction.TYPE, searchRequest), searchResponse -> { - assertThat(searchResponse.getHits(), notNullValue()); - for (SearchHit searchHit : searchResponse.getHits().getHits()) { + for (SearchHit searchHit : searchResponse.getHits()) { docIds.add(searchHit.getId()); } }); diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index a130f669583fa..f5d0b23b437f3 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -636,7 +636,7 @@ public void testAliasFields() throws Exception { // We assert on the mean prediction error in order to reduce the probability // the test fails compared to asserting on the prediction of each individual doc. double meanPredictionError = predictionErrorSum / sourceData.getHits().getHits().length; - String str = "Failure: failed for seed %d modelId %s numberTrees %d\n"; + String str = "Failure: failed for seed %d inferenceEntityId %s numberTrees %d\n"; assertThat( Strings.format(str, seed, modelId, numberTrees) + targetsPredictions + hyperparameters, meanPredictionError, diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java index 822f8df35949e..6dbec53994b2e 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java @@ -261,6 +261,7 @@ public void testDedicatedMlNode() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102657") public void testMaxConcurrentJobAllocations() throws Exception { int numMlNodes = 2; internalCluster().ensureAtMostNumDataNodes(0); diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 33fd7c108863b..942729bb81c64 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -541,6 +541,7 @@ public void testClusterWithTwoMlNodes_RunsDatafeed_GivenOriginalNodeGoesDown() t }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103108") public void testClusterWithTwoMlNodes_StopsDatafeed_GivenJobFailsOnReassign() throws Exception { internalCluster().ensureAtMostNumDataNodes(0); logger.info("Starting dedicated master node..."); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java index 51f6243778517..ffe70d9747a56 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java @@ -7,12 +7,12 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.bytes.BytesArray; 
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.license.License; @@ -109,7 +109,7 @@ public void testGetTrainedModelConfig() throws Exception { ); assertThat(exceptionHolder.get(), is(nullValue())); - AtomicReference refreshResponseAtomicReference = new AtomicReference<>(); + AtomicReference refreshResponseAtomicReference = new AtomicReference<>(); blockingCall( listener -> trainedModelProvider.refreshInferenceIndex(listener), refreshResponseAtomicReference, @@ -198,7 +198,7 @@ public void testGetTrainedModelConfigWithMultiDocDefinition() throws Exception { ); blockingCall( listener -> trainedModelProvider.refreshInferenceIndex(listener), - new AtomicReference(), + new AtomicReference(), new AtomicReference<>() ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index f3254245168b8..152d8fde8c86c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -32,6 +32,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -68,6 +70,7 @@ import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; +import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.ShutdownAwarePlugin; @@ -753,6 +756,7 
@@ public void loadExtensions(ExtensionLoader loader) { public static final int MAX_LOW_PRIORITY_MODELS_PER_NODE = 100; private static final Logger logger = LogManager.getLogger(MachineLearning.class); + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MachineLearning.class); private final Settings settings; private final boolean enabled; @@ -919,6 +923,15 @@ public Collection createComponents(PluginServices services) { return List.of(new JobManagerHolder(), new MachineLearningExtensionHolder()); } + if ("darwin-x86_64".equals(Platforms.PLATFORM_NAME)) { + String msg = "The machine learning plugin will be permanently disabled on macOS x86_64 in new minor versions released " + + "from December 2024 onwards. To continue to use machine learning functionality on macOS please switch to an arm64 " + + "machine (Apple silicon). Alternatively, it will still be possible to run Elasticsearch with machine learning " + + "enabled in a Docker container on macOS x86_64."; + logger.warn(msg); + deprecationLogger.warn(DeprecationCategory.PLUGINS, "ml-darwin-x86_64", msg); + } + machineLearningExtension.get().configure(environment.settings()); this.mlUpgradeModeActionFilter.set(new MlUpgradeModeActionFilter(clusterService)); @@ -1348,6 +1361,7 @@ public List> getPersistentTasksExecutor( @Override public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -1962,7 +1976,7 @@ public void prepareForIndicesMigration(ClusterService clusterService, Client cli originClient.execute( SetUpgradeModeAction.INSTANCE, new SetUpgradeModeAction.Request(true), - ActionListener.wrap(r -> listener.onResponse(Collections.singletonMap("already_in_upgrade_mode", false)), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> l.onResponse(Collections.singletonMap("already_in_upgrade_mode", false))) ); } @@ -1984,7 
+1998,7 @@ public void indicesMigrationComplete( originClient.execute( SetUpgradeModeAction.INSTANCE, new SetUpgradeModeAction.Request(false), - ActionListener.wrap(r -> listener.onResponse(r.isAcknowledged()), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> l.onResponse(r.isAcknowledged())) ); } @@ -2085,40 +2099,39 @@ public void cleanUpFeature( } ); - ActionListener afterWaitingForTasks = ActionListener.wrap(listTasksResponse -> { - listTasksResponse.rethrowFailures("Waiting for indexing requests for .ml-* indices"); - if (results.values().stream().allMatch(b -> b)) { - if (memoryTracker.get() != null) { - memoryTracker.get() - .awaitAndClear( - ActionListener.wrap( - cacheCleared -> SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener), - clearFailed -> { - logger.error( - "failed to clear memory tracker cache via machine learning reset feature API", - clearFailed - ); - SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener); - } - ) - ); - return; + // Stop all model deployments + ActionListener pipelineValidation = unsetResetModeListener.delegateFailureAndWrap( + (delegate, listTasksResponse) -> { + listTasksResponse.rethrowFailures("Waiting for indexing requests for .ml-* indices"); + if (results.values().stream().allMatch(b -> b)) { + if (memoryTracker.get() != null) { + memoryTracker.get() + .awaitAndClear( + ActionListener.wrap( + cacheCleared -> SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate), + clearFailed -> { + logger.error( + "failed to clear memory tracker cache via machine learning reset feature API", + clearFailed + ); + SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate); + } + ) + ); + return; + } + // Call into the original listener to clean up the indices and then clear ml memory cache + SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate); + } else { + final List failedComponents = 
results.entrySet() + .stream() + .filter(result -> result.getValue() == false) + .map(Map.Entry::getKey) + .toList(); + delegate.onFailure(new RuntimeException("Some machine learning components failed to reset: " + failedComponents)); } - // Call into the original listener to clean up the indices and then clear ml memory cache - SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener); - } else { - final List failedComponents = results.entrySet() - .stream() - .filter(result -> result.getValue() == false) - .map(Map.Entry::getKey) - .toList(); - unsetResetModeListener.onFailure( - new RuntimeException("Some machine learning components failed to reset: " + failedComponents) - ); } - }, unsetResetModeListener::onFailure); - - ActionListener afterDataframesStopped = ActionListener.wrap(dataFrameStopResponse -> { + ).delegateFailureAndWrap((delegate, dataFrameStopResponse) -> { // Handle the response results.put("data_frame/analytics", dataFrameStopResponse.isStopped()); if (results.values().stream().allMatch(b -> b)) { @@ -2128,7 +2141,7 @@ public void cleanUpFeature( // This waits for all xpack actions including: allocations, anomaly detections, analytics .setActions("xpack/ml/*") .setWaitForCompletion(true) - .execute(ActionListener.wrap(listMlTasks -> { + .execute(delegate.delegateFailureAndWrap((l, listMlTasks) -> { listMlTasks.rethrowFailures("Waiting for machine learning tasks"); client.admin() .cluster() @@ -2137,48 +2150,37 @@ public void cleanUpFeature( .setDetailed(true) .setWaitForCompletion(true) .setDescriptions("*.ml-*") - .execute(afterWaitingForTasks); - }, unsetResetModeListener::onFailure)); + .execute(l); + })); } else { final List failedComponents = results.entrySet() .stream() .filter(result -> result.getValue() == false) .map(Map.Entry::getKey) .toList(); - unsetResetModeListener.onFailure( - new RuntimeException("Some machine learning components failed to reset: " + failedComponents) - ); + delegate.onFailure(new 
RuntimeException("Some machine learning components failed to reset: " + failedComponents)); } - }, unsetResetModeListener::onFailure); - - ActionListener afterAnomalyDetectionClosed = ActionListener.wrap(closeJobResponse -> { + }).delegateFailureAndWrap((delegate, closeJobResponse) -> { // Handle the response results.put("anomaly_detectors", closeJobResponse.isClosed()); if (machineLearningExtension.get().isDataFrameAnalyticsEnabled() == false) { - afterDataframesStopped.onResponse(new StopDataFrameAnalyticsAction.Response(true)); + delegate.onResponse(new StopDataFrameAnalyticsAction.Response(true)); return; } // Stop data frame analytics StopDataFrameAnalyticsAction.Request stopDataFramesReq = new StopDataFrameAnalyticsAction.Request("_all").setAllowNoMatch(true); - client.execute( - StopDataFrameAnalyticsAction.INSTANCE, - stopDataFramesReq, - ActionListener.wrap(afterDataframesStopped::onResponse, failure -> { - logger.warn( - "failed stopping data frame analytics jobs for machine learning feature reset. Attempting with force=true", - failure - ); - client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq.setForce(true), afterDataframesStopped); - }) - ); - }, unsetResetModeListener::onFailure); - - // Close anomaly detection jobs - ActionListener afterDataFeedsStopped = ActionListener.wrap(datafeedResponse -> { + client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq, ActionListener.wrap(delegate::onResponse, failure -> { + logger.warn( + "failed stopping data frame analytics jobs for machine learning feature reset. 
Attempting with force=true", + failure + ); + client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq.setForce(true), delegate); + })); + }).delegateFailureAndWrap((delegate, datafeedResponse) -> { // Handle the response results.put("datafeeds", datafeedResponse.isStopped()); if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { - afterAnomalyDetectionClosed.onResponse(new CloseJobAction.Response(true)); + delegate.onResponse(new CloseJobAction.Response(true)); return; } CloseJobAction.Request closeJobsRequest = new CloseJobAction.Request().setAllowNoMatch(true).setJobId("_all"); @@ -2186,65 +2188,48 @@ public void cleanUpFeature( client.execute( KillProcessAction.INSTANCE, new KillProcessAction.Request("*"), - ActionListener.wrap( + delegate.delegateFailureAndWrap( // If successful, close and wait for jobs - success -> client.execute( + (l, success) -> client.execute( CloseJobAction.INSTANCE, closeJobsRequest, - ActionListener.wrap(afterAnomalyDetectionClosed::onResponse, failure -> { + ActionListener.wrap(l::onResponse, failure -> { logger.warn( "failed closing anomaly jobs for machine learning feature reset. 
Attempting with force=true", failure ); - client.execute(CloseJobAction.INSTANCE, closeJobsRequest.setForce(true), afterAnomalyDetectionClosed); + client.execute(CloseJobAction.INSTANCE, closeJobsRequest.setForce(true), l); }) - ), - unsetResetModeListener::onFailure + ) ) ); - }, unsetResetModeListener::onFailure); - - // Stop data feeds - ActionListener cancelSnapshotUpgradesListener = ActionListener.wrap( - cancelUpgradesResponse -> { - if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { - afterDataFeedsStopped.onResponse(new StopDatafeedAction.Response(true)); - return; - } - StopDatafeedAction.Request stopDatafeedsReq = new StopDatafeedAction.Request("_all").setAllowNoMatch(true); - client.execute( - StopDatafeedAction.INSTANCE, - stopDatafeedsReq, - ActionListener.wrap(afterDataFeedsStopped::onResponse, failure -> { - logger.warn("failed stopping datafeeds for machine learning feature reset. Attempting with force=true", failure); - client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq.setForce(true), afterDataFeedsStopped); - }) - ); - }, - unsetResetModeListener::onFailure - ); - - // Cancel model snapshot upgrades - ActionListener stopDeploymentsListener = ActionListener.wrap(acknowledgedResponse -> { + }).delegateFailureAndWrap((delegate, cancelUpgradesResponse) -> { + if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { + delegate.onResponse(new StopDatafeedAction.Response(true)); + return; + } + StopDatafeedAction.Request stopDatafeedsReq = new StopDatafeedAction.Request("_all").setAllowNoMatch(true); + client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq, ActionListener.wrap(delegate::onResponse, failure -> { + logger.warn("failed stopping datafeeds for machine learning feature reset. 
Attempting with force=true", failure); + client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq.setForce(true), delegate); + })); + }).delegateFailureAndWrap((delegate, acknowledgedResponse) -> { if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { - cancelSnapshotUpgradesListener.onResponse(new CancelJobModelSnapshotUpgradeAction.Response(true)); + delegate.onResponse(new CancelJobModelSnapshotUpgradeAction.Response(true)); return; } CancelJobModelSnapshotUpgradeAction.Request cancelSnapshotUpgradesReq = new CancelJobModelSnapshotUpgradeAction.Request( "_all", "_all" ); - client.execute(CancelJobModelSnapshotUpgradeAction.INSTANCE, cancelSnapshotUpgradesReq, cancelSnapshotUpgradesListener); - }, unsetResetModeListener::onFailure); - - // Stop all model deployments - ActionListener pipelineValidation = ActionListener.wrap(acknowledgedResponse -> { + client.execute(CancelJobModelSnapshotUpgradeAction.INSTANCE, cancelSnapshotUpgradesReq, delegate); + }).delegateFailureAndWrap((delegate, acknowledgedResponse) -> { if (trainedModelAllocationClusterServiceSetOnce.get() == null || machineLearningExtension.get().isNlpEnabled() == false) { - stopDeploymentsListener.onResponse(AcknowledgedResponse.TRUE); + delegate.onResponse(AcknowledgedResponse.TRUE); return; } - trainedModelAllocationClusterServiceSetOnce.get().removeAllModelAssignments(stopDeploymentsListener); - }, unsetResetModeListener::onFailure); + trainedModelAllocationClusterServiceSetOnce.get().removeAllModelAssignments(delegate); + }); // validate no pipelines are using machine learning models ActionListener afterResetModeSet = ActionListener.wrap(acknowledgedResponse -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java index 3df836e5f9043..bb7c79ff2ec07 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java 
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.ml.action.StartDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.utils.MlTaskParams; +import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; @@ -298,6 +299,10 @@ synchronized List findLongTimeUnassignedTasks(Instant now, PersistentTas if (task.getExecutorNode() == null) { final String taskName = task.getTaskName(); if (MlTasks.JOB_TASK_NAME.equals(taskName) || MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME.equals(taskName)) { + // Ignore failed tasks - they don't need to be assigned to a node + if (((MlTaskState) task.getState()).isFailed()) { + continue; + } final String mlId = ((MlTaskParams) task.getParams()).getMlId(); final TaskNameAndId key = new TaskNameAndId(taskName, mlId); final UnassignedTimeAndReportTime previousInfo = oldUnassignedInfoByTask.get(key); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 60993e12a2088..e4c73106852ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -65,50 +65,54 @@ public TransportDeleteCalendarEventAction( protected void doExecute(Task task, DeleteCalendarEventAction.Request request, ActionListener listener) { final String eventId = request.getEventId(); - ActionListener calendarListener = ActionListener.wrap(calendar -> { - GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId); - 
executeAsyncWithOrigin(client, ML_ORIGIN, TransportGetAction.TYPE, getRequest, ActionListener.wrap(getResponse -> { - if (getResponse.isExists() == false) { - listener.onFailure(new ResourceNotFoundException("No event with id [" + eventId + "]")); - return; - } - - Map source = getResponse.getSourceAsMap(); - String calendarId = (String) source.get(Calendar.ID.getPreferredName()); - if (calendarId == null) { - listener.onFailure( - ExceptionsHelper.badRequestException( - "Event [" + eventId + "] does not have a valid " + Calendar.ID.getPreferredName() - ) - ); - return; - } - - if (calendarId.equals(request.getCalendarId()) == false) { - listener.onFailure( - ExceptionsHelper.badRequestException( - "Event [" - + eventId - + "] has " - + Calendar.ID.getPreferredName() - + " [" - + calendarId - + "] which does not match the request " - + Calendar.ID.getPreferredName() - + " [" - + request.getCalendarId() - + "]" - ) - ); - return; - } - - deleteEvent(eventId, calendar, listener); - }, listener::onFailure)); - }, listener::onFailure); - // Get the calendar first so we check the calendar exists before checking the event exists - jobResultsProvider.calendar(request.getCalendarId(), calendarListener); + jobResultsProvider.calendar(request.getCalendarId(), listener.delegateFailureAndWrap((l, calendar) -> { + GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + TransportGetAction.TYPE, + getRequest, + l.delegateFailureAndWrap((delegate, getResponse) -> { + if (getResponse.isExists() == false) { + delegate.onFailure(new ResourceNotFoundException("No event with id [" + eventId + "]")); + return; + } + + Map source = getResponse.getSourceAsMap(); + String calendarId = (String) source.get(Calendar.ID.getPreferredName()); + if (calendarId == null) { + delegate.onFailure( + ExceptionsHelper.badRequestException( + "Event [" + eventId + "] does not have a valid " + Calendar.ID.getPreferredName() + ) + ); + 
return; + } + + if (calendarId.equals(request.getCalendarId()) == false) { + delegate.onFailure( + ExceptionsHelper.badRequestException( + "Event [" + + eventId + + "] has " + + Calendar.ID.getPreferredName() + + " [" + + calendarId + + "] which does not match the request " + + Calendar.ID.getPreferredName() + + " [" + + request.getCalendarId() + + "]" + ) + ); + return; + } + + deleteEvent(eventId, calendar, delegate); + }) + ); + })); } private void deleteEvent(String eventId, Calendar calendar, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java index 64ad51fc0f722..49c6021a6ed8b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java @@ -84,19 +84,17 @@ private void forceDeleteDatafeed( ClusterState state, ActionListener listener ) { - ActionListener finalListener = ActionListener.wrap( - // use clusterService.state() here so that the updated state without the task is available - response -> datafeedManager.deleteDatafeed(request, clusterService.state(), listener), - listener::onFailure - ); - - ActionListener isolateDatafeedHandler = ActionListener.wrap( - response -> removeDatafeedTask(request, state, finalListener), - listener::onFailure - ); - IsolateDatafeedAction.Request isolateDatafeedRequest = new IsolateDatafeedAction.Request(request.getDatafeedId()); - executeAsyncWithOrigin(client, ML_ORIGIN, IsolateDatafeedAction.INSTANCE, isolateDatafeedRequest, isolateDatafeedHandler); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + IsolateDatafeedAction.INSTANCE, + isolateDatafeedRequest, + listener.delegateFailureAndWrap( + // use clusterService.state() here so that the updated state without the task is available 
+ (l, response) -> datafeedManager.deleteDatafeed(request, clusterService.state(), l) + ).delegateFailureAndWrap((l, response) -> removeDatafeedTask(request, state, l)) + ); } private void removeDatafeedTask(DeleteDatafeedAction.Request request, ClusterState state, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index b28d37022e171..ad85f22873cce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -146,16 +146,15 @@ protected void doExecute( false, true, null, - ActionListener.wrap( - jobBuilders -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute(ActionRunnable.wrap(listener, l -> { + listener.delegateFailureAndWrap( + (delegate, jobBuilders) -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute(ActionRunnable.wrap(delegate, l -> { List jobs = jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()); String[] jobIds = jobs.stream().map(Job::getId).toArray(String[]::new); request.setExpandedJobIds(jobIds); List dataRemovers = createDataRemovers(jobs, taskId, anomalyDetectionAuditor); deleteExpiredData(request, dataRemovers, l, isTimedOutSupplier); - })), - listener::onFailure + })) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java index 45bbd6256c205..ceae2a680feb0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java @@ -63,16 +63,16 @@ public TransportDeleteFilterAction( @Override protected void doExecute(Task task, DeleteFilterAction.Request request, ActionListener listener) { final String filterId = request.getFilterId(); - jobConfigProvider.findJobsWithCustomRules(ActionListener.wrap(jobs -> { + jobConfigProvider.findJobsWithCustomRules(listener.delegateFailureAndWrap((delegate, jobs) -> { List currentlyUsedBy = findJobsUsingFilter(jobs, filterId); if (currentlyUsedBy.isEmpty() == false) { - listener.onFailure( + delegate.onFailure( ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.FILTER_CANNOT_DELETE, filterId, currentlyUsedBy)) ); } else { - deleteFilter(filterId, listener); + deleteFilter(filterId, delegate); } - }, listener::onFailure)); + })); } private static List findJobsUsingFilter(List jobs, String filterId) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index f3b0fcd669637..f694e85144b48 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -167,28 +167,23 @@ protected void masterOperation( } ); - ActionListener markAsDeletingListener = finalListener.delegateFailureAndWrap((delegate, response) -> { - if (request.isForce()) { - forceDeleteJob(parentTaskClient, request, state, delegate); - } else { - normalDeleteJob(parentTaskClient, request, state, delegate); + ActionListener datafeedDeleteListener = finalListener.delegateFailureAndWrap( + (delegate, response) -> { + if (request.isForce()) { + forceDeleteJob(parentTaskClient, request, state, delegate); + } else { + normalDeleteJob(parentTaskClient, request, state, delegate); + } } - 
}); - - ActionListener datafeedDeleteListener = ActionListener.wrap(response -> { + ).delegateFailureAndWrap((delegate, response) -> { auditor.info(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING, taskId)); cancelResetTaskIfExists( request.getJobId(), - ActionListener.wrap( - r -> jobConfigProvider.updateJobBlockReason( - request.getJobId(), - new Blocked(Blocked.Reason.DELETE, taskId), - markAsDeletingListener - ), - finalListener::onFailure + delegate.delegateFailureAndWrap( + (l, r) -> jobConfigProvider.updateJobBlockReason(request.getJobId(), new Blocked(Blocked.Reason.DELETE, taskId), l) ) ); - }, finalListener::onFailure); + }); ActionListener jobExistsListener = ActionListener.wrap( response -> deleteDatafeedIfNecessary(request, datafeedDeleteListener), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java index d19871d0e1b2f..bcf3c1f58cfa9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -110,18 +110,12 @@ protected void masterOperation( ) { logger.debug(() -> format("[%s] Request to delete trained model%s", request.getId(), request.isForce() ? 
" (force)" : "")); - ActionListener performDeletion = ActionListener.wrap( - ignored -> deleteModel(request, state, listener), - listener::onFailure - ); - String id = request.getId(); - - cancelDownloadTask(client, id, performDeletion, request.timeout()); + cancelDownloadTask(client, id, listener.delegateFailureAndWrap((l, ignored) -> deleteModel(request, state, l)), request.timeout()); } // package-private for testing - static void cancelDownloadTask(Client client, String modelId, ActionListener listener, TimeValue timeout) { + static void cancelDownloadTask(Client client, String modelId, ActionListener listener, TimeValue timeout) { logger.debug(() -> format("[%s] Checking if download task exists and cancelling it", modelId)); OriginSettingClient mlClient = new OriginSettingClient(client, ML_ORIGIN); @@ -139,7 +133,7 @@ static void cancelDownloadTask(Client client, String modelId, ActionListener null, taskListener); } static Set getReferencedModelKeys(IngestMetadata ingestMetadata, IngestService ingestService) { @@ -218,10 +212,7 @@ private void deleteModel(DeleteTrainedModelAction.Request request, ClusterState if (request.isForce()) { forceStopDeployment( request.getId(), - ActionListener.wrap( - stopDeploymentResponse -> deleteAliasesAndModel(request, modelAliases, listener), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, stopDeploymentResponse) -> deleteAliasesAndModel(request, modelAliases, l)) ); } else { listener.onFailure( @@ -250,13 +241,11 @@ private void deleteAliasesAndModel( ) { logger.debug(() -> "[" + request.getId() + "] Deleting model"); - ActionListener nameDeletionListener = ActionListener.wrap( - ack -> trainedModelProvider.deleteTrainedModel(request.getId(), ActionListener.wrap(r -> { + ActionListener nameDeletionListener = listener.delegateFailureAndWrap( + (delegate, ack) -> trainedModelProvider.deleteTrainedModel(request.getId(), delegate.delegateFailureAndWrap((l, r) -> { auditor.info(request.getId(), "trained model 
deleted"); - listener.onResponse(AcknowledgedResponse.TRUE); - }, listener::onFailure)), - - listener::onFailure + l.onResponse(AcknowledgedResponse.TRUE); + })) ); // No reason to update cluster state, simply delete the model @@ -294,11 +283,11 @@ private static void executeTaskCancellation( Client client, String modelId, TaskInfo taskInfo, - ActionListener listener, + ActionListener listener, TimeValue timeout ) { if (taskInfo != null) { - ActionListener cancelListener = ActionListener.wrap(listener::onResponse, e -> { + ActionListener cancelListener = ActionListener.wrap(listener::onResponse, e -> { Throwable cause = ExceptionsHelper.unwrapCause(e); if (cause instanceof ResourceNotFoundException) { logger.debug(() -> format("[%s] Task no longer exists when attempting to cancel it", modelId)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java index 3865858f527b4..61db7f683f0f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java @@ -82,13 +82,11 @@ protected void doExecute( ActionListener listener ) { TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); - ActionListener> resultsListener = ActionListener.wrap(unused -> { - EvaluateDataFrameAction.Response response = new EvaluateDataFrameAction.Response( - request.getEvaluation().getName(), - request.getEvaluation().getResults() - ); - listener.onResponse(response); - }, listener::onFailure); + ActionListener> resultsListener = listener.delegateFailureAndWrap( + (delegate, unused) -> delegate.onResponse( + new EvaluateDataFrameAction.Response(request.getEvaluation().getName(), request.getEvaluation().getResults()) + ) + ); // Create an immutable 
collection of parameters to be used by evaluation metrics. EvaluationParameters parameters = new EvaluationParameters(maxBuckets.get()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java index d19b67b52afe1..b1f5eda679006 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java @@ -147,9 +147,8 @@ private void explain( ).build(); extractedFieldsDetectorFactory.createFromSource( config, - ActionListener.wrap( - extractedFieldsDetector -> explain(parentTaskId, config, extractedFieldsDetector, listener), - listener::onFailure + listener.delegateFailureAndWrap( + (l, extractedFieldsDetector) -> explain(parentTaskId, config, extractedFieldsDetector, l) ) ); }); @@ -160,14 +159,8 @@ private void explain( ); extractedFieldsDetectorFactory.createFromSource( request.getConfig(), - ActionListener.wrap( - extractedFieldsDetector -> explain( - parentTaskId, - request.getConfig(), - extractedFieldsDetector, - responseHeaderPreservingListener - ), - responseHeaderPreservingListener::onFailure + responseHeaderPreservingListener.delegateFailureAndWrap( + (l, extractedFieldsDetector) -> explain(parentTaskId, request.getConfig(), extractedFieldsDetector, l) ) ); } @@ -189,13 +182,14 @@ private void explain( ); return; } - - ActionListener memoryEstimationListener = ActionListener.wrap( - memoryEstimation -> listener.onResponse(new ExplainDataFrameAnalyticsAction.Response(fieldExtraction.v2(), memoryEstimation)), - listener::onFailure + estimateMemoryUsage( + parentTaskId, + config, + fieldExtraction.v1(), + listener.delegateFailureAndWrap( + (l, memoryEstimation) -> l.onResponse(new 
ExplainDataFrameAnalyticsAction.Response(fieldExtraction.v2(), memoryEstimation)) + ) ); - - estimateMemoryUsage(parentTaskId, config, fieldExtraction.v1(), memoryEstimationListener); } /** @@ -220,11 +214,8 @@ private void estimateMemoryUsage( estimateMemoryTaskId, config, extractorFactory, - ActionListener.wrap( - result -> listener.onResponse( - new MemoryEstimation(result.getExpectedMemoryWithoutDisk(), result.getExpectedMemoryWithDisk()) - ), - listener::onFailure + listener.delegateFailureAndWrap( + (l, result) -> l.onResponse(new MemoryEstimation(result.getExpectedMemoryWithoutDisk(), result.getExpectedMemoryWithDisk())) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java index 6d183501d2043..5aed29fd6d152 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java @@ -96,15 +96,15 @@ protected void masterOperation( ML_ORIGIN, TransportUpdateAction.TYPE, updateRequest, - ActionListener.wrap(updateResponse -> chainedListener.onResponse(null), chainedListener::onFailure) + chainedListener.delegateFailureAndWrap((l, updateResponse) -> l.onResponse(null)) ); }); } - voidChainTaskExecutor.execute(ActionListener.wrap(aVoids -> { + voidChainTaskExecutor.execute(listener.delegateFailureAndWrap((l, aVoids) -> { logger.debug("finalized job [{}]", jobIdString); - listener.onResponse(AcknowledgedResponse.TRUE); - }, listener::onFailure)); + l.onResponse(AcknowledgedResponse.TRUE); + })); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java index 
a5fe3ad67ca06..17f1459984736 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java @@ -66,10 +66,17 @@ protected void taskOperation( timeRangeBuilder.endTime(request.getEnd()); } paramsBuilder.forTimeRange(timeRangeBuilder.build()); - processManager.flushJob(task, paramsBuilder.build(), ActionListener.wrap(flushAcknowledgement -> { - listener.onResponse( - new FlushJobAction.Response(true, flushAcknowledgement == null ? null : flushAcknowledgement.getLastFinalizedBucketEnd()) - ); - }, listener::onFailure)); + processManager.flushJob( + task, + paramsBuilder.build(), + listener.delegateFailureAndWrap( + (l, flushAcknowledgement) -> l.onResponse( + new FlushJobAction.Response( + true, + flushAcknowledgement == null ? null : flushAcknowledgement.getLastFinalizedBucketEnd() + ) + ) + ) + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java index e42c2b5d87f9e..58de04146aa52 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java @@ -41,7 +41,7 @@ public TransportGetBucketsAction( @Override protected void doExecute(Task task, GetBucketsAction.Request request, ActionListener listener) { - jobManager.jobExists(request.getJobId(), null, ActionListener.wrap(ok -> { + jobManager.jobExists(request.getJobId(), null, listener.delegateFailureAndWrap((delegate, ok) -> { BucketsQueryBuilder query = new BucketsQueryBuilder().expand(request.isExpand()) .includeInterim(request.isExcludeInterim() == false) .start(request.getStart()) @@ -62,14 +62,10 @@ protected void doExecute(Task task, GetBucketsAction.Request request, 
ActionList jobResultsProvider.buckets( request.getJobId(), query, - q -> listener.onResponse(new GetBucketsAction.Response(q)), - listener::onFailure, + q -> delegate.onResponse(new GetBucketsAction.Response(q)), + delegate::onFailure, client ); - - }, - listener::onFailure - - )); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index 3e35429d352c2..89527d2cd12d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -58,16 +58,15 @@ protected void doExecute( ActionListener listener ) { final String[] calendarId = Strings.splitStringByCommaToArray(request.getCalendarId()); - ActionListener calendarExistsListener = ActionListener.wrap(r -> { + checkCalendarExists(calendarId, listener.delegateFailureAndWrap((outerDelegate, r) -> { ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(request.getStart()) .end(request.getEnd()) .from(request.getPageParams().getFrom()) .size(request.getPageParams().getSize()) .calendarIds(calendarId); - ActionListener> eventsListener = ActionListener.wrap( - events -> listener.onResponse(new GetCalendarEventsAction.Response(events)), - listener::onFailure + ActionListener> eventsListener = outerDelegate.delegateFailureAndWrap( + (l, events) -> l.onResponse(new GetCalendarEventsAction.Response(events)) ); if (request.getJobId() != null) { @@ -78,25 +77,18 @@ protected void doExecute( }, jobNotFound -> { // is the request Id a group? 
- jobConfigProvider.groupExists(request.getJobId(), ActionListener.wrap(groupExists -> { + jobConfigProvider.groupExists(request.getJobId(), eventsListener.delegateFailureAndWrap((delegate, groupExists) -> { if (groupExists) { - jobResultsProvider.scheduledEventsForJob( - null, - Collections.singletonList(request.getJobId()), - query, - eventsListener - ); + jobResultsProvider.scheduledEventsForJob(null, Collections.singletonList(request.getJobId()), query, delegate); } else { - listener.onFailure(ExceptionsHelper.missingJobException(request.getJobId())); + delegate.onFailure(ExceptionsHelper.missingJobException(request.getJobId())); } - }, listener::onFailure)); + })); })); } else { jobResultsProvider.scheduledEvents(query, eventsListener); } - }, listener::onFailure); - - checkCalendarExists(calendarId, calendarExistsListener); + })); } private void checkCalendarExists(String[] calendarId, ActionListener listener) { @@ -107,7 +99,7 @@ private void checkCalendarExists(String[] calendarId, ActionListener li jobResultsProvider.calendars( CalendarQueryBuilder.builder().calendarIdTokens(calendarId), - ActionListener.wrap(c -> listener.onResponse(true), listener::onFailure) + listener.delegateFailureAndWrap((l, c) -> l.onResponse(true)) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java index bec0b86e77edb..eecc5999f842b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java @@ -83,7 +83,7 @@ protected void doExecute( searchResources( request, new TaskId(clusterService.localNode().getId(), task.getId()), - ActionListener.wrap(queryPage -> listener.onResponse(new GetDataFrameAnalyticsAction.Response(queryPage)), 
listener::onFailure) + listener.delegateFailureAndWrap((l, queryPage) -> l.onResponse(new GetDataFrameAnalyticsAction.Response(queryPage))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 4d307546fda95..0ca5c706e5b8c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -54,7 +54,7 @@ protected void doExecute(Task task, GetFiltersAction.Request request, ActionList searchResources( request, new TaskId(clusterService.localNode().getId(), task.getId()), - ActionListener.wrap(filters -> listener.onResponse(new GetFiltersAction.Response(filters)), listener::onFailure) + listener.delegateFailureAndWrap((l, filters) -> l.onResponse(new GetFiltersAction.Response(filters))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index 666e6bf478429..4ae6512fcaff4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -108,14 +108,13 @@ protected void doExecute(Task task, GetJobsStatsAction.Request request, ActionLi tasks, true, parentTaskId, - ActionListener.wrap(expandedIds -> { + finalListener.delegateFailureAndWrap((delegate, expandedIds) -> { request.setExpandedJobsIds(new ArrayList<>(expandedIds)); - ActionListener jobStatsListener = ActionListener.wrap( - response -> gatherStatsForClosedJobs(request, response, parentTaskId, finalListener), - finalListener::onFailure + ActionListener jobStatsListener = 
delegate.delegateFailureAndWrap( + (l, response) -> gatherStatsForClosedJobs(request, response, parentTaskId, l) ); super.doExecute(task, request, jobStatsListener); - }, finalListener::onFailure) + }) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java index 78ef2f815c0fb..ab5949412927c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java @@ -78,7 +78,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A clusterService.getClusterSettings(), mlMemoryTracker, settings, - ActionListener.wrap(autoscalingResources -> listener.onResponse(new Response(autoscalingResources)), listener::onFailure) + listener.delegateFailureAndWrap((l, autoscalingResources) -> l.onResponse(new Response(autoscalingResources))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java index a5cc23544fbc9..67838fcfa26df 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java @@ -82,7 +82,7 @@ protected void doExecute( jobManager.jobExists( request.getJobId(), parentTaskId, - ActionListener.wrap(ok -> getModelSnapshots(request, parentTaskId, listener), listener::onFailure) + listener.delegateFailureAndWrap((l, ok) -> getModelSnapshots(request, parentTaskId, l)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index d9dfd0fb23eeb..b37f82e45ec49 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.metrics.Max; @@ -120,22 +120,6 @@ private void getOverallBuckets( ) { JobsContext jobsContext = JobsContext.build(jobs, request); - ActionListener> overallBucketsListener = ActionListener.wrap(overallBuckets -> { - listener.onResponse( - new GetOverallBucketsAction.Response(new QueryPage<>(overallBuckets, overallBuckets.size(), OverallBucket.RESULTS_FIELD)) - ); - }, listener::onFailure); - - ActionListener chunkedBucketSearcherListener = ActionListener.wrap(searcher -> { - if (searcher == null) { - listener.onResponse( - new GetOverallBucketsAction.Response(new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD)) - ); - return; - } - searcher.searchAndComputeOverallBuckets(overallBucketsListener); - }, listener::onFailure); - OverallBucketsProvider overallBucketsProvider = new OverallBucketsProvider( jobsContext.maxBucketSpan, request.getTopN(), @@ -144,7 +128,29 @@ private void getOverallBuckets( OverallBucketsProcessor overallBucketsProcessor = requiresAggregation(request, jobsContext.maxBucketSpan) ? 
new OverallBucketsAggregator(request.getBucketSpan()) : new OverallBucketsCollector(); - initChunkedBucketSearcher(request, jobsContext, overallBucketsProvider, overallBucketsProcessor, chunkedBucketSearcherListener); + initChunkedBucketSearcher( + request, + jobsContext, + overallBucketsProvider, + overallBucketsProcessor, + listener.delegateFailureAndWrap((l, searcher) -> { + if (searcher == null) { + l.onResponse( + new GetOverallBucketsAction.Response(new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD)) + ); + return; + } + searcher.searchAndComputeOverallBuckets( + l.delegateFailureAndWrap( + (ll, overallBuckets) -> ll.onResponse( + new GetOverallBucketsAction.Response( + new QueryPage<>(overallBuckets, overallBuckets.size(), OverallBucket.RESULTS_FIELD) + ) + ) + ) + ); + }) + ); } private static boolean requiresAggregation(GetOverallBucketsAction.Request request, TimeValue maxBucketSpan) { @@ -185,7 +191,7 @@ private void initChunkedBucketSearcher( ActionListener.wrap(searchResponse -> { long totalHits = searchResponse.getHits().getTotalHits().value; if (totalHits > 0) { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); long earliestTime = Intervals.alignToFloor((long) min.value(), maxBucketSpanMillis); Max max = aggregations.get(LATEST_TIME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java index 9bf18671e7c11..8fe7c3686dcb9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java @@ -112,18 +112,18 @@ void preview(Task task, 
DataFrameAnalyticsConfig config, ActionListener { + extractedFieldsDetectorFactory.createFromSource(config, listener.delegateFailureAndWrap((delegate, extractedFieldsDetector) -> { DataFrameDataExtractor extractor = DataFrameDataExtractorFactory.createForSourceIndices( client, parentTaskId.toString(), config, extractedFieldsDetector.detect().v1() ).newExtractor(false); - extractor.preview(ActionListener.wrap(rows -> { + extractor.preview(delegate.delegateFailureAndWrap((l, rows) -> { List fieldNames = extractor.getFieldNames(); - listener.onResponse(new Response(rows.stream().map((r) -> mergeRow(r, fieldNames)).collect(Collectors.toList()))); - }, listener::onFailure)); - }, listener::onFailure)); + l.onResponse(new Response(rows.stream().map((r) -> mergeRow(r, fieldNames)).collect(Collectors.toList()))); + })); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java index 5ceb34bfc0510..3df8d36882ecc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.support.ActionFilters; @@ -36,10 +35,10 @@ import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import 
org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider; @@ -49,7 +48,6 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Date; -import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; @@ -101,27 +99,26 @@ public TransportPreviewDatafeedAction( @Override protected void doExecute(Task task, PreviewDatafeedAction.Request request, ActionListener listener) { TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); - ActionListener datafeedConfigActionListener = ActionListener.wrap(datafeedConfig -> { + ActionListener datafeedConfigActionListener = listener.delegateFailureAndWrap((delegate, datafeedConfig) -> { if (request.getJobConfig() != null) { - previewDatafeed(parentTaskId, datafeedConfig, request.getJobConfig().build(new Date()), request, listener); + previewDatafeed(parentTaskId, datafeedConfig, request.getJobConfig().build(new Date()), request, delegate); return; } jobConfigProvider.getJob( datafeedConfig.getJobId(), parentTaskId, - ActionListener.wrap( - jobBuilder -> previewDatafeed(parentTaskId, datafeedConfig, jobBuilder.build(), request, listener), - listener::onFailure + delegate.delegateFailureAndWrap( + (l, jobBuilder) -> previewDatafeed(parentTaskId, datafeedConfig, jobBuilder.build(), request, l) ) ); - }, listener::onFailure); + }); if (request.getDatafeedConfig() != null) { datafeedConfigActionListener.onResponse(request.getDatafeedConfig()); } else { datafeedConfigProvider.getDatafeedConfig( request.getDatafeedId(), parentTaskId, - 
ActionListener.wrap(builder -> datafeedConfigActionListener.onResponse(builder.build()), listener::onFailure) + datafeedConfigActionListener.delegateFailureAndWrap((l, builder) -> l.onResponse(builder.build())) ); } } @@ -209,10 +206,11 @@ private void isDateNanos(DatafeedConfig datafeed, String timeField, ActionListen client, TransportFieldCapabilitiesAction.TYPE, fieldCapabilitiesRequest, - ActionListener.wrap(fieldCapsResponse -> { - Map timeFieldCaps = fieldCapsResponse.getField(timeField); - listener.onResponse(timeFieldCaps.containsKey(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)); - }, listener::onFailure) + listener.delegateFailureAndWrap( + (l, fieldCapsResponse) -> l.onResponse( + fieldCapsResponse.getField(timeField).containsKey(DateFieldMapper.DATE_NANOS_CONTENT_TYPE) + ) + ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java index d73b942e766cf..77bcc9dbcf7d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java @@ -136,18 +136,13 @@ protected void masterOperation( final DataFrameAnalyticsConfig config = request.getConfig(); - ActionListener sourceDestValidationListener = ActionListener.wrap( - aBoolean -> putValidatedConfig(config, request.masterNodeTimeout(), listener), - listener::onFailure - ); - sourceDestValidator.validate( clusterService.state(), config.getSource().getIndex(), config.getDest().getIndex(), null, SourceDestValidations.ALL_VALIDATIONS, - sourceDestValidationListener + listener.delegateFailureAndWrap((l, aBoolean) -> putValidatedConfig(config, request.masterNodeTimeout(), l)) ); } @@ -191,22 +186,20 @@ private void putValidatedConfig( } 
privRequest.indexPrivileges(indicesPrivileges); - ActionListener privResponseListener = ActionListener.wrap( - r -> handlePrivsResponse(username, preparedForPutConfig, r, masterNodeTimeout, listener), - listener::onFailure + client.execute( + HasPrivilegesAction.INSTANCE, + privRequest, + listener.delegateFailureAndWrap( + (l, r) -> handlePrivsResponse(username, preparedForPutConfig, r, masterNodeTimeout, listener) + ) ); - - client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); }); } else { updateDocMappingAndPutConfig( preparedForPutConfig, threadPool.getThreadContext().getHeaders(), masterNodeTimeout, - ActionListener.wrap( - finalConfig -> listener.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, finalConfig) -> l.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig))) ); } } @@ -223,10 +216,7 @@ private void handlePrivsResponse( memoryCappedConfig, threadPool.getThreadContext().getHeaders(), masterNodeTimeout, - ActionListener.wrap( - finalConfig -> listener.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, finalConfig) -> l.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig))) ); } else { XContentBuilder builder = JsonXContent.contentBuilder(); @@ -254,13 +244,13 @@ private void updateDocMappingAndPutConfig( TimeValue masterNodeTimeout, ActionListener listener ) { - ActionListener auditingListener = ActionListener.wrap(finalConfig -> { + ActionListener auditingListener = listener.delegateFailureAndWrap((delegate, finalConfig) -> { auditor.info( finalConfig.getId(), Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_AUDIT_CREATED, finalConfig.getAnalysis().getWriteableName()) ); - listener.onResponse(finalConfig); - }, listener::onFailure); + delegate.onResponse(finalConfig); + }); ClusterState clusterState = clusterService.state(); if (clusterState 
== null) { @@ -274,7 +264,7 @@ private void updateDocMappingAndPutConfig( client, clusterState, masterNodeTimeout, - ActionListener.wrap(unused -> configProvider.put(config, headers, masterNodeTimeout, auditingListener), listener::onFailure), + auditingListener.delegateFailureAndWrap((l, unused) -> configProvider.put(config, headers, masterNodeTimeout, l)), MlConfigIndex.CONFIG_INDEX_MAPPINGS_VERSION ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index d6e52b6de1fd4..edbb9f297c8cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -237,38 +237,24 @@ protected void masterOperation( return; } - ActionListener finalResponseAction = ActionListener.wrap( - (configToReturn) -> finalResponseListener.onResponse(new Response(configToReturn)), - finalResponseListener::onFailure - ); - - ActionListener verifyClusterAndModelArchitectures = ActionListener.wrap( - (configToReturn) -> verifyMlNodesAndModelArchitectures(configToReturn, client, threadPool, finalResponseAction), - finalResponseListener::onFailure - ); - - ActionListener finishedStoringListener = ActionListener.wrap(bool -> { + var isPackageModel = config.isPackagedModel(); + ActionListener checkStorageIndexSizeListener = finalResponseListener.delegateFailureAndWrap((delegate, bool) -> { TrainedModelConfig configToReturn = trainedModelConfig.clearDefinition().build(); if (modelPackageConfigHolder.get() != null) { triggerModelFetchIfNecessary( configToReturn.getModelId(), modelPackageConfigHolder.get(), request.isWaitForCompletion(), - ActionListener.wrap( - downloadTriggered -> verifyClusterAndModelArchitectures.onResponse(configToReturn), - finalResponseListener::onFailure - 
) + delegate.delegateFailureAndWrap((l, cfg) -> l.onResponse(new Response(cfg))) + .delegateFailureAndWrap( + (l, cfg) -> verifyMlNodesAndModelArchitectures(cfg, client, threadPool, l) + ) + .delegateFailureAndWrap((l, downloadTriggered) -> l.onResponse(configToReturn)) ); } else { - finalResponseListener.onResponse(new PutTrainedModelAction.Response(configToReturn)); + delegate.onResponse(new PutTrainedModelAction.Response(configToReturn)); } - }, finalResponseListener::onFailure); - - var isPackageModel = config.isPackagedModel(); - ActionListener checkStorageIndexSizeListener = ActionListener.wrap( - r -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), finishedStoringListener, isPackageModel), - finalResponseListener::onFailure - ); + }).delegateFailureAndWrap((l, r) -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), l, isPackageModel)); ActionListener tagsModelIdCheckListener = ActionListener.wrap(r -> { if (TrainedModelType.PYTORCH.equals(trainedModelConfig.getModelType())) { @@ -394,14 +380,21 @@ static void checkForExistingTask( ActionListener storeModelListener, TimeValue timeout ) { - TaskRetriever.getDownloadTaskInfo(client, modelId, isWaitForCompletion, ActionListener.wrap(taskInfo -> { - if (taskInfo != null) { - getModelInformation(client, modelId, sendResponseListener); - } else { - // no task exists so proceed with creating the model - storeModelListener.onResponse(null); - } - }, sendResponseListener::onFailure), timeout); + TaskRetriever.getDownloadTaskInfo( + client, + modelId, + isWaitForCompletion, + timeout, + () -> "Timed out waiting for model download to complete", + ActionListener.wrap(taskInfo -> { + if (taskInfo != null) { + getModelInformation(client, modelId, sendResponseListener); + } else { + // no task exists so proceed with creating the model + storeModelListener.onResponse(null); + } + }, sendResponseListener::onFailure) + ); } private static void getModelInformation(Client client, String 
modelId, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 0a5641836df4a..2f2a76a1df1e2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -577,21 +577,29 @@ private static void step1CheckForDownloadTask( String modelId, ActionListener nextStepListener ) { - TaskRetriever.getDownloadTaskInfo(mlOriginClient, modelId, timeout != null, ActionListener.wrap(taskInfo -> { - if (taskInfo == null) { - nextStepListener.onResponse(null); - } else { - failOrRespondWith0( - () -> new ElasticsearchStatusException( - Messages.getMessage(Messages.MODEL_DOWNLOAD_IN_PROGRESS, modelId), - RestStatus.REQUEST_TIMEOUT - ), - errorIfDefinitionIsMissing, - modelId, - failureListener - ); - } - }, failureListener::onFailure), timeout); + // check task is present, do not wait for completion + TaskRetriever.getDownloadTaskInfo( + mlOriginClient, + modelId, + timeout != null, + timeout, + () -> Messages.getMessage(Messages.MODEL_DOWNLOAD_IN_PROGRESS, modelId), + ActionListener.wrap(taskInfo -> { + if (taskInfo == null) { + nextStepListener.onResponse(null); + } else { + failOrRespondWith0( + () -> new ElasticsearchStatusException( + Messages.getMessage(Messages.MODEL_DOWNLOAD_IN_PROGRESS, modelId), + RestStatus.REQUEST_TIMEOUT + ), + errorIfDefinitionIsMissing, + modelId, + failureListener + ); + } + }, failureListener::onFailure) + ); } private static void failOrRespondWith0( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java index 
780841880a6c1..88c19fc670794 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.aggs; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InvalidAggregationPathException; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; @@ -34,7 +34,7 @@ public static InvalidAggregationPathException invalidPathException(List * @param aggregations The aggregations * @return The double values and doc_counts extracted from the path if the bucket path exists and the value is a valid number */ - public static Optional extractDoubleBucketedValues(String bucketPath, Aggregations aggregations) { + public static Optional extractDoubleBucketedValues(String bucketPath, InternalAggregations aggregations) { return extractDoubleBucketedValues(bucketPath, aggregations, BucketHelpers.GapPolicy.INSERT_ZEROS, false); } @@ -50,7 +50,7 @@ public static Optional extractDoubleBucketedValues(String bu */ public static Optional extractDoubleBucketedValues( String bucketPath, - Aggregations aggregations, + InternalAggregations aggregations, BucketHelpers.GapPolicy gapPolicy, boolean excludeLastBucket ) { @@ -101,7 +101,7 @@ public static Optional extractDoubleBucketedValues( public static Optional extractBucket( String bucketPath, - Aggregations aggregations, + InternalAggregations aggregations, int bucket ) { List parsedPath = AggregationPath.parse(bucketPath).getPathElementsAsStringList(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 9b337d559854a..6da883e6b9858 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; @@ -175,7 +174,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return aggregations; } @@ -323,7 +322,7 @@ public InternalAggregation reduce(List aggregations, Aggreg for (InternalAggregation aggregation : aggregations) { InternalCategorizationAggregation categorizationAggregation = (InternalCategorizationAggregation) aggregation; for (Bucket bucket : categorizationAggregation.buckets) { - categorizer.mergeWireCategory(bucket.serializableCategory).addSubAggs((InternalAggregations) bucket.getAggregations()); + categorizer.mergeWireCategory(bucket.serializableCategory).addSubAggs(bucket.getAggregations()); if (reduceContext.isCanceled().get()) { break; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java index eebe4e49776e5..650c02af00837 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java @@ -17,7 +17,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; @@ -93,7 +92,7 @@ public ChangePointAggregator(String name, String bucketsPath, Map maybeBucketsValue = extractDoubleBucketedValues( bucketsPaths()[0], aggregations, @@ -137,7 +136,7 @@ public InternalAggregation doReduce(Aggregations aggregations, AggregationReduce ChangePointBucket changePointBucket = null; if (change.changePoint() >= 0) { changePointBucket = extractBucket(bucketsPaths()[0], aggregations, change.changePoint()).map( - b -> new ChangePointBucket(b.getKey(), b.getDocCount(), (InternalAggregations) b.getAggregations()) + b -> new ChangePointBucket(b.getKey(), b.getDocCount(), b.getAggregations()) ).orElse(null); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java index 4f4db1eed2341..c97166ac6fd80 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.xcontent.XContentBuilder; @@ -68,7 +67,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return aggregations; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java index 02386acbd6134..97e803b5961a7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java @@ -9,8 +9,8 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; @@ -33,7 +33,7 @@ public BucketCorrelationAggregator( } @Override - public InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context) { + public InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context) { CountCorrelationIndicator bucketPathValue = MlAggsHelper.extractDoubleBucketedValues(bucketsPaths()[0], aggregations) .map( doubleBucketValues -> new CountCorrelationIndicator( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java index ea01f07146ea6..fd5c66399c72d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java @@ -26,7 +26,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; public class InferencePipelineAggregator extends PipelineAggregator { @@ -102,12 +101,7 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe inference = new WarningInferenceResults(e.getMessage()); } - final List aggs = bucket.getAggregations() - .asList() - .stream() - .map((p) -> (InternalAggregation) p) - .collect(Collectors.toList()); - + final List aggs = new ArrayList<>(bucket.getAggregations().asList()); InternalInferenceAggregation aggResult = new InternalInferenceAggregation(name(), metadata(), inference); aggs.add(aggResult); InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), bucket); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java index 518b76aae3732..f26dadf5ece22 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java @@ -13,8 +13,8 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import 
org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.xpack.ml.aggs.DoubleArray; import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; @@ -224,7 +224,7 @@ private static double sidedKSStat(double a, double b, Alternative alternative) { } @Override - public InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context) { + public InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context) { Optional maybeBucketsValue = extractDoubleBucketedValues(bucketsPaths()[0], aggregations).map( bucketValue -> { double[] values = new double[bucketValue.getValues().length + 1]; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java index b32bdf6dcbccf..3e196e1a12723 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.core.ml.action.PostDataAction; import org.elasticsearch.xpack.core.ml.annotations.Annotation; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; @@ -35,6 +34,7 @@ import org.elasticsearch.xpack.ml.annotations.AnnotationPersister; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetector; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData; +import 
org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java index 0374dbf8eb1fe..cc6ea64b6a42e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java @@ -20,10 +20,10 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.utils.Intervals; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorUtils; import java.time.ZonedDateTime; import java.util.Collections; @@ -129,7 +129,7 @@ private Map checkCurrentBucketEventCount(long start, long end) { new DateHistogramAggregationBuilder(DATE_BUCKETS).fixedInterval(new DateHistogramInterval(bucketSpan + "ms")) .field(timeField) ) - .query(ExtractorUtils.wrapInTimeRangeQuery(datafeedQuery, timeField, start, end)) + .query(DataExtractorUtils.wrapInTimeRangeQuery(datafeedQuery, timeField, start, end)) .runtimeMappings(runtimeMappings); SearchRequest searchRequest = new SearchRequest(datafeedIndices).source(searchSourceBuilder).indicesOptions(indicesOptions); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/DataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java similarity index 97% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/DataExtractor.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java index ba2043a17767f..991916333f4cf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/DataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.core.ml.datafeed.extractor; +package org.elasticsearch.xpack.ml.datafeed.extractor; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.search.SearchResponse; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java index ca6f138967bbe..3175891aa4d6e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java @@ -16,7 +16,6 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java new file mode 100644 index 0000000000000..0f6ae6f90fb52 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.datafeed.extractor; + +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; + +/** + * Utility methods for various DataExtractor implementations. + */ +public final class DataExtractorUtils { + + private static final String EPOCH_MILLIS = "epoch_millis"; + + /** + * Combines a user query with a time range query. 
+ */ + public static QueryBuilder wrapInTimeRangeQuery(QueryBuilder userQuery, String timeField, long start, long end) { + QueryBuilder timeQuery = new RangeQueryBuilder(timeField).gte(start).lt(end).format(EPOCH_MILLIS); + return new BoolQueryBuilder().filter(userQuery).filter(timeQuery); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java index fd57419abaa83..4cd5379d8fe3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java @@ -14,13 +14,15 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.core.Nullable; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigUtils; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorUtils; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -95,7 +97,7 @@ public Result next() throws IOException { SearchInterval 
searchInterval = new SearchInterval(context.start, context.end); if (aggregationToJsonProcessor == null) { - Aggregations aggs = search(); + InternalAggregations aggs = search(); if (aggs == null) { hasNext = false; return new Result(searchInterval, Optional.empty()); @@ -117,7 +119,7 @@ public Result next() throws IOException { ); } - private Aggregations search() { + private InternalAggregations search() { LOGGER.debug("[{}] Executing aggregated search", context.jobId); T searchRequest = buildSearchRequest(buildBaseSearchSource()); assert searchRequest.request().allowPartialSearchResults() == false; @@ -132,7 +134,7 @@ private Aggregations search() { } } - private void initAggregationProcessor(Aggregations aggs) throws IOException { + private void initAggregationProcessor(InternalAggregations aggs) throws IOException { aggregationToJsonProcessor = new AggregationToJsonProcessor( context.timeField, context.fields, @@ -151,10 +153,10 @@ private SearchSourceBuilder buildBaseSearchSource() { // For derivative aggregations the first bucket will always be null // so query one extra histogram bucket back and hope there is data // in that bucket - long histogramSearchStartTime = Math.max(0, context.start - ExtractorUtils.getHistogramIntervalMillis(context.aggs)); + long histogramSearchStartTime = Math.max(0, context.start - DatafeedConfigUtils.getHistogramIntervalMillis(context.aggs)); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0) - .query(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, histogramSearchStartTime, context.end)); + .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, histogramSearchStartTime, context.end)); if (context.runtimeMappings.isEmpty() == false) { searchSourceBuilder.runtimeMappings(context.runtimeMappings); @@ -166,11 +168,11 @@ private SearchSourceBuilder buildBaseSearchSource() { protected abstract T buildSearchRequest(SearchSourceBuilder searchRequestBuilder); - 
private static Aggregations validateAggs(@Nullable Aggregations aggs) { + private static InternalAggregations validateAggs(@Nullable InternalAggregations aggs) { if (aggs == null) { return null; } - List aggsAsList = aggs.asList(); + List aggsAsList = aggs.asList(); if (aggsAsList.isEmpty()) { return null; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java index e815d48b5202a..d1133672945ac 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java @@ -13,10 +13,10 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.Intervals; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; public record AggregationDataExtractorFactory( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java index 612860efee549..5c9711a6e5d8b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java @@ -11,7 +11,8 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; @@ -89,7 +90,7 @@ class AggregationToJsonProcessor { this.compositeAggDateValueSourceName = compositeAggDateValueSourceName; } - public void process(Aggregations aggs) throws IOException { + public void process(InternalAggregations aggs) throws IOException { processAggs(0, aggs.asList()); } @@ -102,7 +103,7 @@ public void process(Aggregations aggs) throws IOException { *
  • {@link Percentiles}
  • * */ - private void processAggs(long docCount, List aggregations) throws IOException { + private void processAggs(long docCount, List aggregations) throws IOException { if (aggregations.isEmpty()) { // This means we reached a bucket aggregation without sub-aggs. Thus, we can flush the path written so far. queueDocToWrite(keyValuePairs, docCount); @@ -230,7 +231,7 @@ private void processDateHistogram(Histogram agg) throws IOException { } } - List childAggs = bucket.getAggregations().asList(); + List childAggs = bucket.getAggregations().asList(); processAggs(bucket.getDocCount(), childAggs); keyValuePairs.remove(timeField); } @@ -269,7 +270,7 @@ private void processCompositeAgg(CompositeAggregation agg) throws IOException { } Collection addedFields = processCompositeAggBucketKeys(bucket.getKey()); - List childAggs = bucket.getAggregations().asList(); + List childAggs = bucket.getAggregations().asList(); processAggs(bucket.getDocCount(), childAggs); keyValuePairs.remove(timeField); for (String fieldName : addedFields) { @@ -335,7 +336,7 @@ boolean bucketAggContainsRequiredAgg(MultiBucketsAggregation aggregation) { } boolean foundRequiredAgg = false; - List aggs = asList(aggregation.getBuckets().get(0).getAggregations()); + List aggs = asList(aggregation.getBuckets().get(0).getAggregations()); for (Aggregation agg : aggs) { if (fields.contains(agg.getName())) { foundRequiredAgg = true; @@ -484,7 +485,7 @@ public long getKeyValueCount() { return keyValueWrittenCount; } - private static List asList(@Nullable Aggregations aggs) { + private static List asList(@Nullable InternalAggregations aggs) { return aggs == null ? 
Collections.emptyList() : aggs.asList(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java index d4bd75c92eb18..0dfdd9897737e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java @@ -12,16 +12,17 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigUtils; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.core.ml.utils.Intervals; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorUtils; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -70,7 +71,7 @@ class CompositeAggregationDataExtractor implements DataExtractor { this.context = Objects.requireNonNull(dataExtractorContext); 
this.timingStatsReporter = Objects.requireNonNull(timingStatsReporter); this.requestBuilder = Objects.requireNonNull(requestBuilder); - this.interval = ExtractorUtils.getHistogramIntervalMillis(compositeAggregationBuilder); + this.interval = DatafeedConfigUtils.getHistogramIntervalMillis(compositeAggregationBuilder); this.hasNext = true; } @@ -107,7 +108,7 @@ public Result next() throws IOException { } SearchInterval searchInterval = new SearchInterval(context.start, context.end); - Aggregations aggs = search(); + InternalAggregations aggs = search(); if (aggs == null) { LOGGER.trace(() -> "[" + context.jobId + "] extraction finished"); hasNext = false; @@ -117,7 +118,7 @@ public Result next() throws IOException { return new Result(searchInterval, Optional.of(processAggs(aggs))); } - private Aggregations search() { + private InternalAggregations search() { // Compare to the normal aggregation implementation, this search does not search for the previous bucket's data. // For composite aggs, since it is scrolling, it is not really possible to know the previous pages results in the current page. // Aggregations like derivative cannot work within composite aggs, for now. 
@@ -127,7 +128,7 @@ private Aggregations search() { () -> format("[%s] Executing composite aggregated search from [%s] to [%s]", context.jobId, context.start, context.end) ); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0) - .query(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, context.start, context.end)); + .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, context.start, context.end)); if (context.runtimeMappings.isEmpty() == false) { searchSourceBuilder.runtimeMappings(context.runtimeMappings); @@ -141,7 +142,7 @@ private Aggregations search() { try { LOGGER.trace(() -> "[" + context.jobId + "] Search composite response was obtained"); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); if (aggregations == null) { return null; } @@ -174,7 +175,7 @@ protected SearchResponse executeSearchRequest(ActionRequestBuilder 0) { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); earliestTime = (long) min.value(); Max max = aggregations.get(LATEST_TIME); @@ -269,7 +285,7 @@ private DataSummary newAggregatedDataSummary() { LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); // This can happen if all the indices the datafeed is searching are deleted after it started. 
// Note that unlike the scrolled data summary method above we cannot check for this situation // by checking for zero hits, because aggregations that work on rollups return zero hits even @@ -287,7 +303,7 @@ private DataSummary newAggregatedDataSummary() { private SearchSourceBuilder rangeSearchBuilder() { return new SearchSourceBuilder().size(0) - .query(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, currentStart, context.end)) + .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, currentStart, context.end)) .runtimeMappings(context.runtimeMappings) .aggregation(AggregationBuilders.min(EARLIEST_TIME).field(context.timeField)) .aggregation(AggregationBuilders.max(LATEST_TIME).field(context.timeField)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java index d0fbeb03150ed..b4141ec632d3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java @@ -11,10 +11,10 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.Intervals; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import java.util.Objects; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index 4cfcf6509faa0..0caa59fae914b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -19,14 +19,15 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorUtils; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import java.io.IOException; @@ -126,8 +127,7 @@ protected InputStream initScroll(long startTimestamp) throws IOException { logger.debug("[{}] Search response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); scrollId = searchResponse.getScrollId(); - SearchHit hits[] = searchResponse.getHits().getHits(); - return processAndConsumeSearchHits(hits); + return processAndConsumeSearchHits(searchResponse.getHits()); } finally { searchResponse.decRef(); } @@ -158,7 +158,7 @@ protected SearchResponse executeSearchRequest(SearchRequestBuilder 
searchRequest private SearchRequestBuilder buildSearchRequest(long start) { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(context.scrollSize) .sort(context.extractedFields.timeField(), SortOrder.ASC) - .query(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.extractedFields.timeField(), start, context.end)) + .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.extractedFields.timeField(), start, context.end)) .runtimeMappings(context.runtimeMappings); SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client).setScroll(SCROLL_TIMEOUT) @@ -184,9 +184,9 @@ private SearchRequestBuilder buildSearchRequest(long start) { /** * IMPORTANT: This is not an idempotent method. This method changes the input array by setting each element to null. */ - private InputStream processAndConsumeSearchHits(SearchHit hits[]) throws IOException { + private InputStream processAndConsumeSearchHits(SearchHits hits) throws IOException { - if (hits == null || hits.length == 0) { + if (hits.getHits().length == 0) { hasNext = false; clearScroll(); return null; @@ -194,11 +194,10 @@ private InputStream processAndConsumeSearchHits(SearchHit hits[]) throws IOExcep BytesStreamOutput outputStream = new BytesStreamOutput(); - SearchHit lastHit = hits[hits.length - 1]; + SearchHit lastHit = hits.getAt(hits.getHits().length - 1); lastTimestamp = context.extractedFields.timeFieldValue(lastHit); try (SearchHitToJsonProcessor hitProcessor = new SearchHitToJsonProcessor(context.extractedFields, outputStream)) { - for (int i = 0; i < hits.length; i++) { - SearchHit hit = hits[i]; + for (SearchHit hit : hits) { if (isCancelled) { Long timestamp = context.extractedFields.timeFieldValue(hit); if (timestamp != null) { @@ -212,9 +211,6 @@ private InputStream processAndConsumeSearchHits(SearchHit hits[]) throws IOExcep } } hitProcessor.process(hit); - // hack to remove the reference from object. 
This object can be big and consume alot of memory. - // We are removing it as soon as we process it. - hits[i] = null; } } return outputStream.bytes().streamInput(); @@ -237,8 +233,7 @@ private InputStream continueScroll() throws IOException { logger.debug("[{}] Search response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); scrollId = searchResponse.getScrollId(); - SearchHit hits[] = searchResponse.getHits().getHits(); - return processAndConsumeSearchHits(hits); + return processAndConsumeSearchHits(searchResponse.getHits()); } finally { if (searchResponse != null) { searchResponse.decRef(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java index 7d3a063e3435c..658e1fd56f627 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java @@ -19,11 +19,11 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlStrings; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import java.util.Map; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index 6c3fb28fe2c83..c890ab599c380 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; @@ -148,20 +149,20 @@ public void preview(ActionListener> listener) { client, TransportSearchAction.TYPE, searchRequestBuilder.request(), - ActionListener.wrap(searchResponse -> { + listener.delegateFailureAndWrap((delegate, searchResponse) -> { if (searchResponse.getHits().getHits().length == 0) { - listener.onResponse(Collections.emptyList()); + delegate.onResponse(Collections.emptyList()); return; } - final SearchHit[] hits = searchResponse.getHits().getHits(); - List rows = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String[] extractedValues = extractValues(hit); - rows.add(extractedValues == null ? new Row(null, hit, true) : new Row(extractedValues, hit, false)); + List rows = new ArrayList<>(searchResponse.getHits().getHits().length); + for (SearchHit hit : searchResponse.getHits().getHits()) { + var unpooled = hit.asUnpooled(); + String[] extractedValues = extractValues(unpooled); + rows.add(extractedValues == null ? 
new Row(null, unpooled, true) : new Row(extractedValues, unpooled, false)); } - listener.onResponse(rows); - }, listener::onFailure) + delegate.onResponse(rows); + }) ); } @@ -251,8 +252,8 @@ private List processSearchResponse(SearchResponse searchResponse) { return null; } - SearchHit[] hits = searchResponse.getHits().getHits(); - List rows = new ArrayList<>(hits.length); + SearchHits hits = searchResponse.getHits(); + List rows = new ArrayList<>(hits.getHits().length); for (SearchHit hit : hits) { if (isCancelled) { hasNext = false; @@ -317,12 +318,13 @@ private String[] extractProcessedValue(ProcessedField processedField, SearchHit } private Row createRow(SearchHit hit) { - String[] extractedValues = extractValues(hit); + var unpooled = hit.asUnpooled(); + String[] extractedValues = extractValues(unpooled); if (extractedValues == null) { - return new Row(null, hit, true); + return new Row(null, unpooled, true); } boolean isTraining = trainTestSplitter.get().isTraining(extractedValues); - Row row = new Row(extractedValues, hit, isTraining); + Row row = new Row(extractedValues, unpooled, isTraining); LOGGER.trace( () -> format( "[%s] Extracted row: sort key = [%s], is_training = [%s], values = %s", @@ -393,11 +395,8 @@ public void collectDataSummaryAsync(ActionListener dataSummaryActio client, TransportSearchAction.TYPE, searchRequestBuilder.request(), - ActionListener.wrap( - searchResponse -> dataSummaryActionListener.onResponse( - new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields) - ), - dataSummaryActionListener::onFailure + dataSummaryActionListener.delegateFailureAndWrap( + (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java index 1d78ad22f3f85..49e25c95713ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.metrics.Cardinality; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ClientHelper; @@ -156,7 +156,7 @@ private static void buildFieldCardinalitiesMap( SearchResponse searchResponse, ActionListener> listener ) { - Aggregations aggs = searchResponse.getAggregations(); + InternalAggregations aggs = searchResponse.getAggregations(); if (aggs == null) { listener.onFailure(ExceptionsHelper.serverError("Unexpected null response when gathering field cardinalities")); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java index bd37706622187..9e2db58befdbf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java @@ -61,7 +61,7 @@ protected FieldSortBuilder sortField() { @Override protected SearchHit map(SearchHit hit) { - return hit; + return hit.asUnpooled(); } @Override diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java index 7eef0e526eac3..2012ca87578b0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java @@ -12,7 +12,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.license.License; @@ -157,7 +157,7 @@ private CountDownLatch storeTrainedModelDoc(TrainedModelDefinitionDoc trainedMod CountDownLatch latch = new CountDownLatch(1); // Latch is attached to this action as it is the last one to execute. - ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { + ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { if (refreshed != null) { LOGGER.debug(() -> "[" + analytics.getId() + "] refreshed inference index after model store"); } @@ -210,7 +210,7 @@ private CountDownLatch storeTrainedModelMetadata(TrainedModelMetadata trainedMod CountDownLatch latch = new CountDownLatch(1); // Latch is attached to this action as it is the last one to execute. 
- ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { + ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { if (refreshed != null) { LOGGER.debug(() -> "[" + analytics.getId() + "] refreshed inference index after model metadata store"); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java index 1b6818a8727f3..0c693ff2d34f4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.tasks.TaskId; @@ -76,7 +76,7 @@ public final void execute(ActionListener listener) { protected abstract void doExecute(ActionListener listener); - protected void refreshDestAsync(ActionListener refreshListener) { + protected void refreshDestAsync(ActionListener refreshListener) { ParentTaskAssigningClient parentTaskClient = parentTaskClient(); executeWithHeadersAsync( config.getHeaders(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java index 8adf5b3f0621a..9e56387ed773e 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.dataframe.steps; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; @@ -63,7 +63,7 @@ protected void doExecute(ActionListener listener) { listener::onFailure ); - ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { + ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { // TODO This could fail with errors. In that case we get stuck with the copied index. // We could delete the index in case of failure or we could try building the factory before reindexing // to catch the error early on. 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java index 7b27090dc302d..dbf1f3e7be3d9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java @@ -13,10 +13,10 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; @@ -60,7 +60,7 @@ public Name name() { @Override protected void doExecute(ActionListener listener) { - ActionListener refreshListener = ActionListener.wrap( + ActionListener refreshListener = ActionListener.wrap( refreshResponse -> listener.onResponse(new StepResponse(false)), listener::onFailure ); @@ -89,7 +89,7 @@ private void indexDataCounts(ActionListener listener) { } } - private void refreshIndices(ActionListener listener) { + private void refreshIndices(ActionListener listener) { RefreshRequest refreshRequest = new RefreshRequest( AnomalyDetectorsIndex.jobStateIndexPattern(), MlStatsIndex.indexPattern(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index 65ac2b678d93b..ad005e6d9ae6c 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -11,9 +11,9 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; @@ -85,7 +85,7 @@ protected void doExecute(ActionListener listener) { } }, listener::onFailure); - ActionListener refreshDestListener = ActionListener.wrap( + ActionListener refreshDestListener = ActionListener.wrap( refreshResponse -> searchIfTestDocsExist(testDocsExistListener), listener::onFailure ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java index 1ca78df1fad3d..0ccdd1eb64601 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java @@ -12,8 +12,8 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import 
org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; @@ -278,7 +278,7 @@ public void cancel(String reason, TimeValue timeout) { // We need to cancel the reindexing task within context with ML origin as we started the task // from the same context - CancelTasksResponse cancelReindexResponse = cancelTaskWithinMlOriginContext(cancelReindex); + ListTasksResponse cancelReindexResponse = cancelTaskWithinMlOriginContext(cancelReindex); Throwable firstError = null; if (cancelReindexResponse.getNodeFailures().isEmpty() == false) { @@ -296,7 +296,7 @@ public void cancel(String reason, TimeValue timeout) { } } - private CancelTasksResponse cancelTaskWithinMlOriginContext(CancelTasksRequest cancelTasksRequest) { + private ListTasksResponse cancelTaskWithinMlOriginContext(CancelTasksRequest cancelTasksRequest) { final ThreadContext threadContext = client.threadPool().getThreadContext(); try (ThreadContext.StoredContext ignore = threadContext.stashWithOrigin(ML_ORIGIN)) { return client.admin().cluster().cancelTasks(cancelTasksRequest).actionGet(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index ebe4295f8efbf..3ef2affa5d399 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -14,7 +14,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import 
org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; @@ -101,7 +101,7 @@ private TrainTestSplitter createStratifiedSplitter(Classification classification searchRequestBuilder::get ); try { - Aggregations aggs = searchResponse.getAggregations(); + InternalAggregations aggs = searchResponse.getAggregations(); Terms terms = aggs.get(aggName); Map classCounts = new HashMap<>(); for (Terms.Bucket bucket : terms.getBuckets()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java index a1142796558f4..ef8af6af445fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java @@ -142,13 +142,9 @@ private static void copyAssignments( for (Map.Entry assignment : nodeAssignments.entrySet()) { AssignmentPlan.Node originalNode = originalNodeById.get(assignment.getKey().id()); dest.assignModelToNode(m, originalNode, assignment.getValue()); - if (m.currentAllocationsByNodeId().containsKey(originalNode.id())) { - // TODO (#101612) requiredMemory should be calculated by the AssignmentPlan.Builder - // As the node has all its available memory we need to manually account memory of models with - // current allocations. - long requiredMemory = m.estimateMemoryUsageBytes(m.currentAllocationsByNodeId().get(originalNode.id())); - dest.accountMemory(m, originalNode, requiredMemory); - } + // As the node has all its available memory we need to manually account memory of models with + // current allocations. 
+ dest.accountMemory(m, originalNode); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java index 026b433a8c2d4..98988ffa11055 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java @@ -68,7 +68,7 @@ Deployment modifyModelPreservingPreviousAssignments(Deployment m) { AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { // As the model/node objects the assignment plan are the modified ones, // they will not match the models/nodes members we have in this class. - // Therefore, we build a lookup table based on the ids so we can merge the plan + // Therefore, we build a lookup table based on the ids, so we can merge the plan // with its preserved allocations. final Map, Integer> plannedAssignmentsByModelNodeIdPair = new HashMap<>(); for (Deployment m : assignmentPlan.models()) { @@ -80,7 +80,6 @@ AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { AssignmentPlan.Builder mergedPlanBuilder = AssignmentPlan.builder(nodes, deployments); for (Node n : nodes) { - // TODO (#101612) Should the first loop happen in the builder constructor? 
for (Deployment deploymentAllocationsToPreserve : deployments) { // if the model m is already allocated on the node n and I want to preserve this allocation diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java index d9cb0f08a6cd0..123c728587604 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java @@ -401,8 +401,7 @@ public Builder assignModelToNode(Deployment deployment, Node node, int allocatio if (allocations <= 0) { return this; } - if (/*isAlreadyAssigned(deployment, node) == false - &&*/ requiredMemory > remainingNodeMemory.get(node)) { + if (requiredMemory > remainingNodeMemory.get(node)) { throw new IllegalArgumentException( "not enough memory on node [" + node.id() @@ -448,13 +447,14 @@ private static int getCurrentAllocations(Deployment m, Node n) { } public void accountMemory(Deployment m, Node n) { - // TODO (#101612) remove or refactor unused method - long requiredMemory = getDeploymentMemoryRequirement(m, n, getCurrentAllocations(m, n)); - accountMemory(m, n, requiredMemory); + if (m.currentAllocationsByNodeId().containsKey(n.id())) { + int allocations = m.currentAllocationsByNodeId().get(n.id()); + long requiredMemory = m.estimateMemoryUsageBytes(allocations); + accountMemory(m, n, requiredMemory); + } } - public void accountMemory(Deployment m, Node n, long requiredMemory) { - // TODO (#101612) computation of required memory should be done internally + private void accountMemory(Deployment m, Node n, long requiredMemory) { remainingNodeMemory.computeIfPresent(n, (k, v) -> v - requiredMemory); if (remainingNodeMemory.containsKey(n) && remainingNodeMemory.get(n) < 0) { throw new 
IllegalArgumentException("not enough memory on node [" + n.id() + "] to assign model [" + m.id() + "]"); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java index 8bdc99998a0c2..81696cd20d922 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java @@ -310,8 +310,6 @@ private void unassignOversizedModels(Node n) { private AssignmentPlan toPlan() { AssignmentPlan.Builder builder = AssignmentPlan.builder(nodes, deployments); for (Map.Entry, Integer> assignment : tryAssigningRemainingCores().entrySet()) { - // TODO (#101612) The model should be assigned to the node only when it is possible. This means, that canAssign should be - // integrated into the assignModelToNode. 
if (builder.canAssign(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue())) { builder.assignModelToNode(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java index 8c9499ca9e00c..9af2e4cd49b17 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java @@ -183,15 +183,9 @@ private AssignmentPlan swapOriginalModelsInPlan( for (Map.Entry assignment : nodeAssignments.entrySet()) { Node originalNode = originalNodeById.get(assignment.getKey().id()); planBuilder.assignModelToNode(originalDeployment, originalNode, assignment.getValue()); - if (originalDeployment.currentAllocationsByNodeId().containsKey(originalNode.id())) { - // TODO (#101612) requiredMemory should be calculated by the AssignmentPlan.Builder - // As the node has all its available memory we need to manually account memory of models with - // current allocations. - long requiredMemory = originalDeployment.estimateMemoryUsageBytes( - originalDeployment.currentAllocationsByNodeId().get(originalNode.id()) - ); - planBuilder.accountMemory(m, originalNode, requiredMemory); - } + // As the node has all its available memory we need to manually account memory of models with + // current allocations. 
+ planBuilder.accountMemory(originalDeployment, originalNode); } } return planBuilder.build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index 597a97134a1e7..1ad7058cb1fdd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file was contributed to by a generative AI */ package org.elasticsearch.xpack.ml.inference.deployment; @@ -53,6 +55,7 @@ import org.elasticsearch.xpack.ml.inference.pytorch.results.ThreadSettings; import java.io.IOException; +import java.time.Duration; import java.time.Instant; import java.util.Objects; import java.util.Optional; @@ -74,6 +77,7 @@ public class DeploymentManager { private static final Logger logger = LogManager.getLogger(DeploymentManager.class); private static final AtomicLong requestIdCounter = new AtomicLong(1); + public static final int NUM_RESTART_ATTEMPTS = 3; private final Client client; private final NamedXContentRegistry xContentRegistry; @@ -131,6 +135,14 @@ ProcessContext addProcessContext(Long id, ProcessContext processContext) { } public void startDeployment(TrainedModelDeploymentTask task, ActionListener finalListener) { + startDeployment(task, null, finalListener); + } + + public void startDeployment( + TrainedModelDeploymentTask task, + Integer startsCount, + ActionListener finalListener + ) { logger.info("[{}] Starting model deployment of model [{}]", task.getDeploymentId(), task.getModelId()); if (processContextByAllocation.size() >= maxProcesses) { @@ -144,7 +156,7 @@ public void 
startDeployment(TrainedModelDeploymentTask task, ActionListener process = new SetOnce<>(); private final SetOnce nlpTaskProcessor = new SetOnce<>(); @@ -461,14 +475,13 @@ class ProcessContext { private final PriorityProcessWorkerExecutorService priorityProcessWorker; private final AtomicInteger rejectedExecutionCount = new AtomicInteger(); private final AtomicInteger timeoutCount = new AtomicInteger(); + private final AtomicInteger startsCount = new AtomicInteger(); private volatile Instant startTime; private volatile Integer numThreadsPerAllocation; private volatile Integer numAllocations; private volatile boolean isStopped; - private static final TimeValue COMPLETION_TIMEOUT = TimeValue.timeValueMinutes(3); - - ProcessContext(TrainedModelDeploymentTask task) { + ProcessContext(TrainedModelDeploymentTask task, Integer startsCount) { this.task = Objects.requireNonNull(task); resultProcessor = new PyTorchResultProcessor(task.getDeploymentId(), threadSettings -> { this.numThreadsPerAllocation = threadSettings.numThreadsPerAllocation(); @@ -485,6 +498,7 @@ class ProcessContext { PROCESS_NAME, task.getParams().getQueueCapacity() ); + this.startsCount.set(startsCount == null ? 1 : startsCount); } PyTorchResultProcessor getResultProcessor() { @@ -507,7 +521,7 @@ synchronized void startAndLoad(TrainedModelLocation modelLocation, ActionListene task, executorServiceForProcess, () -> resultProcessor.awaitCompletion(COMPLETION_TIMEOUT.getMinutes(), TimeUnit.MINUTES), - this::onProcessCrash + onProcessCrashHandleRestarts(startsCount) ) ); startTime = Instant.now(); @@ -530,6 +544,57 @@ synchronized void startAndLoad(TrainedModelLocation modelLocation, ActionListene } } + private Consumer onProcessCrashHandleRestarts(AtomicInteger startsCount) { + return (reason) -> { + if (isThisProcessOlderThan1Day()) { + startsCount.set(1); + logger.error( + "[{}] inference process crashed due to reason [{}]. 
This process was started more than 24 hours ago; " + + "the starts count is reset to 1.", + task.getDeploymentId(), + reason + ); + } else { + logger.error("[{}] inference process crashed due to reason [{}]", task.getDeploymentId(), reason); + } + + processContextByAllocation.remove(task.getId()); + isStopped = true; + resultProcessor.stop(); + stateStreamer.cancel(); + + if (startsCount.get() <= NUM_RESTART_ATTEMPTS) { + logger.info("[{}] restarting inference process after [{}] starts", task.getDeploymentId(), startsCount.get()); + priorityProcessWorker.shutdownNow(); // TODO what to do with these tasks? + ActionListener errorListener = ActionListener.wrap((trainedModelDeploymentTask -> { + logger.debug("Completed restart of inference process, the [{}] start", startsCount); + }), + (e) -> finishClosingProcess( + startsCount, + "Failed to restart inference process because of error [" + e.getMessage() + "]" + ) + ); + + startDeployment(task, startsCount.incrementAndGet(), errorListener); + } else { + finishClosingProcess(startsCount, reason); + } + }; + } + + private boolean isThisProcessOlderThan1Day() { + return startTime.isBefore(Instant.now().minus(Duration.ofDays(1))); + } + + private void finishClosingProcess(AtomicInteger startsCount, String reason) { + logger.warn("[{}] inference process failed after [{}] starts, not restarting again", task.getDeploymentId(), startsCount.get()); + priorityProcessWorker.shutdownNowWithError(new IllegalStateException(reason)); + if (nlpTaskProcessor.get() != null) { + nlpTaskProcessor.get().close(); + } + task.setFailed("inference process crashed due to reason [" + reason + "]"); + } + void startPriorityProcessWorker() { executorServiceForProcess.submit(priorityProcessWorker::start); } @@ -643,19 +708,6 @@ private void closeProcessIfPresent() { } } - private void onProcessCrash(String reason) { - logger.error("[{}] inference process crashed due to reason [{}]", task.getDeploymentId(), reason); - 
processContextByAllocation.remove(task.getId()); - isStopped = true; - resultProcessor.stop(); - stateStreamer.cancel(); - priorityProcessWorker.shutdownNowWithError(new IllegalStateException(reason)); - if (nlpTaskProcessor.get() != null) { - nlpTaskProcessor.get().close(); - } - task.setFailed("inference process crashed due to reason [" + reason + "]"); - } - void loadModel(TrainedModelLocation modelLocation, ActionListener listener) { if (isStopped) { listener.onFailure(new IllegalArgumentException("Process has stopped, model loading canceled")); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java index cd7ed9e3eb55a..851dd8744d03e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java @@ -171,16 +171,8 @@ public void infer( ); return; } - trainedModelAssignmentNodeService.infer( - this, - update.apply(inferenceConfigHolder.get()), - input, - skipQueue, - timeout, - prefixType, - parentActionTask, - listener - ); + var updatedConfig = update.isEmpty() ? 
inferenceConfigHolder.get() : inferenceConfigHolder.get().apply(update); + trainedModelAssignmentNodeService.infer(this, updatedConfig, input, skipQueue, timeout, prefixType, parentActionTask, listener); } public Optional modelStats() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java index ffd70849d8f1c..fe0bd18b2147a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java @@ -175,7 +175,10 @@ public void infer(Map fields, InferenceConfigUpdate update, Acti listener.onResponse(new WarningInferenceResults(Messages.getMessage(INFERENCE_WARNING_ALL_FIELDS_MISSING, modelId))); return; } - InferenceResults inferenceResults = trainedModelDefinition.infer(flattenedFields, update.apply(inferenceConfig)); + InferenceResults inferenceResults = trainedModelDefinition.infer( + flattenedFields, + update.isEmpty() ? 
inferenceConfig : inferenceConfig.apply(update) + ); if (shouldPersistStats) { persistStats(false); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java index 5a2e3d29df949..9014c79f0af98 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java @@ -53,7 +53,10 @@ public void setNextReader(LeafReaderContext segmentContext) { public void addFeatures(Map featureMap, int docId) throws IOException { Source source = sourceLookup.getSource(this.segmentContext, docId); for (FieldValueFetcher vf : this.valueFetcherList) { - featureMap.put(vf.fieldName(), vf.valueFetcher().fetchValues(source, docId, new ArrayList<>()).get(0)); + List values = vf.valueFetcher().fetchValues(source, docId, new ArrayList<>()); + if (values.isEmpty() == false) { + featureMap.put(vf.fieldName(), values.get(0)); + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java index 068462bcdfca2..4e3fa3addaf30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java @@ -55,6 +55,15 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r if (ltrRescoreContext.regressionModelDefinition == null) { throw new IllegalStateException("local model reference is null, missing rewriteAndFetch before rescore phase?"); } + + if (rescoreContext.getWindowSize() < topDocs.scoreDocs.length) { + throw new IllegalArgumentException( + 
"Rescore window is too small and should be at least the value of from + size but was [" + + rescoreContext.getWindowSize() + + "]" + ); + } + LocalModel definition = ltrRescoreContext.regressionModelDefinition; // First take top slice of incoming docs, to be rescored: diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java index 11676cc4a1599..a5a7859a7f938 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java @@ -32,10 +32,10 @@ public class LearningToRankRescorerBuilder extends RescorerBuilder { - public static final String NAME = "learning_to_rank"; - private static final ParseField MODEL_FIELD = new ParseField("model_id"); - private static final ParseField PARAMS_FIELD = new ParseField("params"); - private static final ObjectParser PARSER = new ObjectParser<>(NAME, false, Builder::new); + public static final ParseField NAME = new ParseField("learning_to_rank"); + public static final ParseField MODEL_FIELD = new ParseField("model_id"); + public static final ParseField PARAMS_FIELD = new ParseField("params"); + private static final ObjectParser PARSER = new ObjectParser<>(NAME.getPreferredName(), false, Builder::new); static { PARSER.declareString(Builder::setModelId, MODEL_FIELD); @@ -251,7 +251,7 @@ protected LearningToRankRescorerContext innerBuildContext(int windowSize, Search @Override public String getWriteableName() { - return NAME; + return NAME.getPreferredName(); } @Override @@ -260,6 +260,11 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); } + @Override + protected boolean isWindowSizeRequired() { + return true; + } + @Override protected void 
doWriteTo(StreamOutput out) throws IOException { assert localModel == null || rescoreOccurred : "Unnecessarily populated local model object"; @@ -270,7 +275,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); + builder.startObject(NAME.getPreferredName()); builder.field(MODEL_FIELD.getPreferredName(), modelId); if (this.params != null) { builder.field(PARAMS_FIELD.getPreferredName(), this.params); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java index 11b699df66b83..0bfc64c9b0027 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java @@ -69,7 +69,7 @@ public Vocabulary(StreamInput in) throws IOException { } else { merges = List.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { scores = in.readCollectionAsList(StreamInput::readDouble); } else { scores = List.of(); @@ -95,7 +95,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { out.writeStringCollection(merges); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeCollection(scores, StreamOutput::writeDouble); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index d267966a1d795..b502e0d6db341 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -30,6 +29,7 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.Numbers; @@ -52,6 +52,7 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Sum; @@ -419,7 +420,7 @@ public void getTrainedModelMetadata( })); } - public void refreshInferenceIndex(ActionListener listener) { + public void refreshInferenceIndex(ActionListener listener) { executeAsyncWithOrigin( client, ML_ORIGIN, @@ -663,7 +664,7 @@ public void getTrainedModel( ActionListener trainedModelSearchHandler = ActionListener.wrap(modelSearchResponse -> { TrainedModelConfig.Builder builder; try { - builder = handleHits(modelSearchResponse.getHits().getHits(), modelId, this::parseModelConfigLenientlyFromSource).get(0); + builder = 
handleHits(modelSearchResponse.getHits(), modelId, this::parseModelConfigLenientlyFromSource).get(0); } catch (ResourceNotFoundException ex) { getTrainedModelListener.onFailure( new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId)) @@ -701,7 +702,7 @@ public void getTrainedModel( ActionListener.wrap(definitionSearchResponse -> { try { List docs = handleHits( - definitionSearchResponse.getHits().getHits(), + definitionSearchResponse.getHits(), modelId, (bytes, resourceId) -> ChunkedTrainedModelRestorer.parseModelDefinitionDocLenientlyFromSource( bytes, @@ -1268,15 +1269,15 @@ private static Set matchedResourceIds(String[] tokens) { } private static List handleHits( - SearchHit[] hits, + SearchHits hits, String resourceId, CheckedBiFunction parseLeniently ) throws Exception { - if (hits.length == 0) { + if (hits.getHits().length == 0) { throw new ResourceNotFoundException(resourceId); } - List results = new ArrayList<>(hits.length); - String initialIndex = hits[0].getIndex(); + List results = new ArrayList<>(hits.getHits().length); + String initialIndex = hits.getAt(0).getIndex(); for (SearchHit hit : hits) { // We don't want to spread across multiple backing indices if (hit.getIndex().equals(initialIndex)) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index ac16948e32ed6..577bbe3dac6ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import 
org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse; @@ -23,6 +22,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -451,7 +451,7 @@ private void deleteResultsByQuery( ) { assert indices.length > 0; - ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { + ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { logger.info("[{}] running delete by query on [{}]", jobId, String.join(", ", indices)); ConstantScoreQueryBuilder query = new ConstantScoreQueryBuilder(new TermQueryBuilder(Job.ID.getPreferredName(), jobId)); DeleteByQueryRequest request = new DeleteByQueryRequest(indices).setQuery(query) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index becbffefff8c8..f8f1e95fecd2e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -68,9 +68,9 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import 
org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; @@ -546,7 +546,7 @@ public void getDataCountsModelSizeAndTimingStats( request.setParentTask(parentTaskId); } executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, request, ActionListener.wrap(response -> { - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); if (aggs == null) { handler.apply(new DataCounts(jobId), new ModelSizeStats.Builder(jobId).build(), new TimingStats(jobId)); return; @@ -1602,7 +1602,7 @@ void calculateEstablishedMemoryUsage( ML_ORIGIN, search.request(), ActionListener.wrap(response -> { - List aggregations = response.getAggregations().asList(); + List aggregations = response.getAggregations().asList(); if (aggregations.size() == 1) { ExtendedStats extendedStats = (ExtendedStats) aggregations.get(0); long count = extendedStats.getCount(); @@ -1810,12 +1810,12 @@ public void getForecastStats( ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); if (aggregations == null) { handler.accept(new ForecastStats()); return; } - Map aggregationsAsMap = aggregations.asMap(); + Map aggregationsAsMap = aggregations.asMap(); StatsAccumulator memoryStats = StatsAccumulator.fromStatsAggregation( (Stats) aggregationsAsMap.get(ForecastStats.Fields.MEMORY) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java 
index 6acffc3a6f745..055c75d252281 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java @@ -8,7 +8,7 @@ import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Max; @@ -38,7 +38,7 @@ public OverallBucketsProvider(TimeValue maxJobBucketSpan, int topN, double minOv public List computeOverallBuckets(Histogram histogram) { List overallBuckets = new ArrayList<>(); for (Histogram.Bucket histogramBucket : histogram.getBuckets()) { - Aggregations histogramBucketAggs = histogramBucket.getAggregations(); + InternalAggregations histogramBucketAggs = histogramBucket.getAggregations(); Terms jobsAgg = histogramBucketAggs.get(Job.ID.getPreferredName()); int jobsCount = jobsAgg.getBuckets().size(); int bucketTopN = Math.min(topN, jobsCount); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 658db2997485d..d003578158f48 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -1073,7 +1073,7 @@ public ByteSizeValue getOpenProcessMemoryUsage() { ModelSizeStats modelSizeStats = processContext.getAutodetectCommunicator().getModelSizeStats(); 
ModelSizeStats.AssignmentMemoryBasis basis = modelSizeStats.getAssignmentMemoryBasis(); memoryUsedBytes += switch (basis != null ? basis : ModelSizeStats.AssignmentMemoryBasis.MODEL_MEMORY_LIMIT) { - case MODEL_MEMORY_LIMIT -> modelSizeStats.getModelBytesMemoryLimit(); + case MODEL_MEMORY_LIMIT -> Optional.ofNullable(modelSizeStats.getModelBytesMemoryLimit()).orElse(0L); case CURRENT_MODEL_BYTES -> modelSizeStats.getModelBytes(); case PEAK_MODEL_BYTES -> Optional.ofNullable(modelSizeStats.getPeakModelBytes()).orElse(modelSizeStats.getModelBytes()); }; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TaskRetriever.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TaskRetriever.java index 652592bb08591..b60f57e5aaaf6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TaskRetriever.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TaskRetriever.java @@ -7,20 +7,28 @@ package org.elasticsearch.xpack.ml.utils; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.transport.ReceiveTimeoutTransportException; import org.elasticsearch.xpack.core.ml.MlTasks; +import java.util.function.Supplier; + import static org.elasticsearch.xpack.core.ml.MlTasks.downloadModelTaskDescription; /** * Utility class for retrieving download tasks created by a PUT trained model API request. */ public class TaskRetriever { + /** * Returns a {@link TaskInfo} if one exists representing an in-progress trained model download. 
* @@ -28,16 +36,18 @@ public class TaskRetriever { * @param modelId the id of the model to check for an existing task * @param waitForCompletion a boolean flag determine if the request should wait for an existing task to complete before returning (aka * wait for the download to complete) + * @param timeout the timeout value in seconds that the request should fail if it does not complete + * @param errorMessageOnWaitTimeout Message to use if the request times out with {@code waitForCompletion == true} * @param listener a listener, if a task is found it is returned via {@code ActionListener.onResponse(taskInfo)}. * If a task is not found null is returned - * @param timeout the timeout value in seconds that the request should fail if it does not complete */ public static void getDownloadTaskInfo( Client client, String modelId, boolean waitForCompletion, - ActionListener listener, - TimeValue timeout + TimeValue timeout, + Supplier errorMessageOnWaitTimeout, + ActionListener listener ) { client.admin() .cluster() @@ -53,19 +63,46 @@ public static void getDownloadTaskInfo( if (tasks.size() > 0) { // there really shouldn't be more than a single task but if there is we'll just use the first one listener.onResponse(tasks.get(0)); + } else if (waitForCompletion && didItTimeout(response)) { + listener.onFailure(taskDidNotCompleteException(errorMessageOnWaitTimeout.get())); } else { + response.rethrowFailures("Checking model [" + modelId + "] download status"); listener.onResponse(null); } - }, - e -> listener.onFailure( + }, e -> { + listener.onFailure( new ElasticsearchStatusException( "Unable to retrieve task information for model id [{}]", RestStatus.INTERNAL_SERVER_ERROR, e, modelId ) - ) - )); + ); + })); + } + + private static boolean didItTimeout(ListTasksResponse response) { + if (response.getNodeFailures().isEmpty() == false) { + // if one node timed out then the others will also have timed out + var firstNodeFailure = response.getNodeFailures().get(0); + if 
(firstNodeFailure.status() == RestStatus.REQUEST_TIMEOUT) { + return true; + } + + var timeoutException = ExceptionsHelper.unwrap( + firstNodeFailure, + ElasticsearchTimeoutException.class, + ReceiveTimeoutTransportException.class + ); + if (timeoutException != null) { + return true; + } + } + return false; + } + + private static ElasticsearchException taskDidNotCompleteException(String message) { + return new ElasticsearchStatusException(message, RestStatus.REQUEST_TIMEOUT); } private TaskRetriever() {} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java index f5f81a5ca15f3..28cdb31700a29 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.test.ESTestCase; @@ -223,19 +222,19 @@ public void testAnomalyDetectionOnly() throws IOException { Settings settings = Settings.builder().put("path.home", createTempDir()).build(); MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, true, false, false, false)); try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) { - List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null); + List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null); assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class))); assertThat(restHandlers, 
hasItem(instanceOf(RestGetJobsAction.class))); assertThat(restHandlers, not(hasItem(instanceOf(RestGetTrainedModelsAction.class)))); assertThat(restHandlers, not(hasItem(instanceOf(RestGetDataFrameAnalyticsAction.class)))); assertThat(restHandlers, not(hasItem(instanceOf(RestStartTrainedModelDeploymentAction.class)))); - List actions = machineLearning.getActions().stream().map(ActionPlugin.ActionHandler::getAction).toList(); - assertThat(actions, hasItem(instanceOf(XPackUsageFeatureAction.class))); - assertThat(actions, hasItem(instanceOf(MlInfoAction.class))); - assertThat(actions, hasItem(instanceOf(GetJobsAction.class))); - assertThat(actions, not(hasItem(instanceOf(GetTrainedModelsAction.class)))); - assertThat(actions, not(hasItem(instanceOf(GetDataFrameAnalyticsAction.class)))); - assertThat(actions, not(hasItem(instanceOf(StartTrainedModelDeploymentAction.class)))); + List actions = machineLearning.getActions().stream().map(h -> (Object) h.getAction()).toList(); + assertThat(actions, hasItem(XPackUsageFeatureAction.MACHINE_LEARNING)); + assertThat(actions, hasItem(MlInfoAction.INSTANCE)); + assertThat(actions, hasItem(GetJobsAction.INSTANCE)); + assertThat(actions, not(hasItem(GetTrainedModelsAction.INSTANCE))); + assertThat(actions, not(hasItem(GetDataFrameAnalyticsAction.INSTANCE))); + assertThat(actions, not(hasItem(StartTrainedModelDeploymentAction.INSTANCE))); } } @@ -243,19 +242,19 @@ public void testDataFrameAnalyticsOnly() throws IOException { Settings settings = Settings.builder().put("path.home", createTempDir()).build(); MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, true, false, false)); try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) { - List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null); + List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null); 
assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class))); assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class)))); assertThat(restHandlers, hasItem(instanceOf(RestGetTrainedModelsAction.class))); assertThat(restHandlers, hasItem(instanceOf(RestGetDataFrameAnalyticsAction.class))); assertThat(restHandlers, not(hasItem(instanceOf(RestStartTrainedModelDeploymentAction.class)))); - List actions = machineLearning.getActions().stream().map(ActionPlugin.ActionHandler::getAction).toList(); - assertThat(actions, hasItem(instanceOf(XPackUsageFeatureAction.class))); - assertThat(actions, hasItem(instanceOf(MlInfoAction.class))); - assertThat(actions, not(hasItem(instanceOf(GetJobsAction.class)))); - assertThat(actions, hasItem(instanceOf(GetTrainedModelsAction.class))); - assertThat(actions, hasItem(instanceOf(GetDataFrameAnalyticsAction.class))); - assertThat(actions, not(hasItem(instanceOf(StartTrainedModelDeploymentAction.class)))); + List actions = machineLearning.getActions().stream().map(h -> (Object) h.getAction()).toList(); + assertThat(actions, hasItem(XPackUsageFeatureAction.MACHINE_LEARNING)); + assertThat(actions, hasItem(MlInfoAction.INSTANCE)); + assertThat(actions, not(hasItem(GetJobsAction.INSTANCE))); + assertThat(actions, hasItem(GetTrainedModelsAction.INSTANCE)); + assertThat(actions, hasItem(GetDataFrameAnalyticsAction.INSTANCE)); + assertThat(actions, not(hasItem(StartTrainedModelDeploymentAction.INSTANCE))); } } @@ -263,19 +262,19 @@ public void testNlpOnly() throws IOException { Settings settings = Settings.builder().put("path.home", createTempDir()).build(); MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, false, true, false)); try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) { - List restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null); + List restHandlers = 
machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null); assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class))); assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class)))); assertThat(restHandlers, hasItem(instanceOf(RestGetTrainedModelsAction.class))); assertThat(restHandlers, not(hasItem(instanceOf(RestGetDataFrameAnalyticsAction.class)))); assertThat(restHandlers, hasItem(instanceOf(RestStartTrainedModelDeploymentAction.class))); - List actions = machineLearning.getActions().stream().map(ActionPlugin.ActionHandler::getAction).toList(); - assertThat(actions, hasItem(instanceOf(XPackUsageFeatureAction.class))); - assertThat(actions, hasItem(instanceOf(MlInfoAction.class))); - assertThat(actions, not(hasItem(instanceOf(GetJobsAction.class)))); - assertThat(actions, hasItem(instanceOf(GetTrainedModelsAction.class))); - assertThat(actions, not(hasItem(instanceOf(GetDataFrameAnalyticsAction.class)))); - assertThat(actions, hasItem(instanceOf(StartTrainedModelDeploymentAction.class))); + List actions = machineLearning.getActions().stream().map(h -> (Object) h.getAction()).toList(); + assertThat(actions, hasItem(XPackUsageFeatureAction.MACHINE_LEARNING)); + assertThat(actions, hasItem(MlInfoAction.INSTANCE)); + assertThat(actions, not(hasItem(GetJobsAction.INSTANCE))); + assertThat(actions, hasItem(GetTrainedModelsAction.INSTANCE)); + assertThat(actions, not(hasItem(GetDataFrameAnalyticsAction.INSTANCE))); + assertThat(actions, hasItem(StartTrainedModelDeploymentAction.INSTANCE)); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java index a393f691ae004..f0ac79de17cab 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java @@ -253,6 
+253,7 @@ public void testFindLongTimeUnassignedTasks() { Instant eightHoursAgo = now.minus(Duration.ofHours(8)); Instant sevenHoursAgo = eightHoursAgo.plus(Duration.ofHours(1)); Instant twoHoursAgo = sevenHoursAgo.plus(Duration.ofHours(5)); + Instant tomorrow = now.plus(Duration.ofHours(24)); PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder(); addJobTask("job1", "node1", JobState.OPENED, tasksBuilder); @@ -304,5 +305,16 @@ public void testFindLongTimeUnassignedTasks() { "[xpack/ml/job]/[job3] unassigned for [28800] seconds" ) ); + + tasksBuilder = PersistentTasksCustomMetadata.builder(); + addJobTask("job1", null, JobState.FAILED, tasksBuilder); + addJobTask("job2", null, JobState.FAILED, tasksBuilder); + addJobTask("job3", null, JobState.FAILED, tasksBuilder); + addJobTask("job4", null, JobState.FAILED, tasksBuilder); + addJobTask("job5", "node1", JobState.FAILED, tasksBuilder); + itemsToReport = notifier.findLongTimeUnassignedTasks(tomorrow, tasksBuilder.build()); + // We still have unassigned jobs, but now all the jobs are failed, so none should be reported as unassigned + // as it doesn't make any difference whether they're assigned or not and autoscaling will ignore them + assertThat(itemsToReport, empty()); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java index 92ceb536cfd43..29a8a35ff0fdd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java @@ -86,7 +86,7 @@ private static SearchHit createForecastStatsHit(ForecastRequestStats.ForecastReq ForecastRequestStats.STATUS.getPreferredName(), new DocumentField(ForecastRequestStats.STATUS.getPreferredName(), 
Collections.singletonList(status.toString())) ); - SearchHit hit = new SearchHit(0, ""); + SearchHit hit = SearchHit.unpooled(0, ""); hit.addDocumentFields(documentFields, Map.of()); return hit; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java index 4f1a99f634a0a..9402a358dc305 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.support.PlainActionFuture; @@ -57,9 +56,9 @@ public void tearDownThreadPool() { public void testCancelDownloadTaskCallsListenerWithNullWhenNoTasksExist() { var client = mockClientWithTasksResponse(Collections.emptyList(), threadPool); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); - cancelDownloadTask(client, "modelId", listener, TIMEOUT); + cancelDownloadTask(client, "inferenceEntityId", listener, TIMEOUT); assertThat(listener.actionGet(TIMEOUT), nullValue()); } @@ -70,19 +69,19 @@ public void testCancelDownloadTaskCallsOnFailureWithErrorWhenCancellingFailsWith doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener 
listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new Exception("cancel error")); return Void.TYPE; }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any()); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); - cancelDownloadTask(client, "modelId", listener, TIMEOUT); + cancelDownloadTask(client, "inferenceEntityId", listener, TIMEOUT); var exception = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(exception.status(), is(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat(exception.getMessage(), is("Unable to cancel task for model id [modelId]")); + assertThat(exception.getMessage(), is("Unable to cancel task for model id [inferenceEntityId]")); } public void testCancelDownloadTaskCallsOnResponseNullWhenTheTaskNoLongerExistsWhenCancelling() { @@ -91,15 +90,15 @@ public void testCancelDownloadTaskCallsOnResponseNullWhenTheTaskNoLongerExistsWh doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new ResourceNotFoundException("task no longer there")); return Void.TYPE; }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any()); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); - cancelDownloadTask(client, "modelId", listener, TIMEOUT); + cancelDownloadTask(client, "inferenceEntityId", listener, TIMEOUT); assertThat(listener.actionGet(TIMEOUT), nullValue()); } @@ -115,24 +114,24 @@ public void testCancelDownloadTasksCallsGetsUnableToRetrieveTaskInfoError() { return Void.TYPE; }).when(client).execute(same(TransportListTasksAction.TYPE), any(), any()); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); - cancelDownloadTask(client, "modelId", listener, TIMEOUT); + 
cancelDownloadTask(client, "inferenceEntityId", listener, TIMEOUT); var exception = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(exception.status(), is(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat(exception.getMessage(), is("Unable to retrieve existing task information for model id [modelId]")); + assertThat(exception.getMessage(), is("Unable to retrieve existing task information for model id [inferenceEntityId]")); } public void testCancelDownloadTaskCallsOnResponseWithTheCancelResponseWhenATaskExists() { var client = mockClientWithTasksResponse(getTaskInfoListOfOne(), threadPool); - var cancelResponse = mock(CancelTasksResponse.class); + var cancelResponse = mock(ListTasksResponse.class); mockCancelTasksResponse(client, cancelResponse); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); - cancelDownloadTask(client, "modelId", listener, TIMEOUT); + cancelDownloadTask(client, "inferenceEntityId", listener, TIMEOUT); assertThat(listener.actionGet(TIMEOUT), is(cancelResponse)); } @@ -142,12 +141,12 @@ private static void mockCancelTask(Client client) { when(cluster.prepareCancelTasks()).thenReturn(new CancelTasksRequestBuilder(client)); } - private static void mockCancelTasksResponse(Client client, CancelTasksResponse response) { + private static void mockCancelTasksResponse(Client client, ListTasksResponse response) { mockCancelTask(client); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onResponse(response); return Void.TYPE; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java index a15bec8c110d6..6c39f7ad2a722 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.SearchIntervalTests; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.junit.Before; import org.mockito.stubbing.Answer; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java index 6ed7a3311c94a..73810e4e0046f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java @@ -163,7 +163,7 @@ public void testCheckForExistingTaskCallsOnFailureForAnError() { TransportPutTrainedModelAction.checkForExistingTask( client, - "modelId", + "inferenceEntityId", true, responseListener, new PlainActionFuture(), @@ -172,7 +172,7 @@ public void testCheckForExistingTaskCallsOnFailureForAnError() { var exception = expectThrows(ElasticsearchException.class, () -> responseListener.actionGet(TIMEOUT)); assertThat(exception.status(), is(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat(exception.getMessage(), is("Unable to retrieve task information for model id [modelId]")); + assertThat(exception.getMessage(), is("Unable to retrieve task information for model id [inferenceEntityId]")); } public void testCheckForExistingTaskCallsStoreModelListenerWhenNoTasksExist() { @@ -180,7 +180,14 @@ public void 
testCheckForExistingTaskCallsStoreModelListenerWhenNoTasksExist() { var storeListener = new PlainActionFuture(); - TransportPutTrainedModelAction.checkForExistingTask(client, "modelId", true, new PlainActionFuture<>(), storeListener, TIMEOUT); + TransportPutTrainedModelAction.checkForExistingTask( + client, + "inferenceEntityId", + true, + new PlainActionFuture<>(), + storeListener, + TIMEOUT + ); assertThat(storeListener.actionGet(TIMEOUT), nullValue()); } @@ -190,16 +197,26 @@ public void testCheckForExistingTaskThrowsNoModelFoundError() { prepareGetTrainedModelResponse(client, Collections.emptyList()); var respListener = new PlainActionFuture(); - TransportPutTrainedModelAction.checkForExistingTask(client, "modelId", true, respListener, new PlainActionFuture<>(), TIMEOUT); + TransportPutTrainedModelAction.checkForExistingTask( + client, + "inferenceEntityId", + true, + respListener, + new PlainActionFuture<>(), + TIMEOUT + ); var exception = expectThrows(ElasticsearchException.class, () -> respListener.actionGet(TIMEOUT)); - assertThat(exception.getMessage(), is("No model information found for a concurrent create model execution for model id [modelId]")); + assertThat( + exception.getMessage(), + is("No model information found for a concurrent create model execution for model id [inferenceEntityId]") + ); } public void testCheckForExistingTaskReturnsTask() { var client = mockClientWithTasksResponse(getTaskInfoListOfOne(), threadPool); - TrainedModelConfig trainedModel = TrainedModelConfigTests.createTestInstance("modelId") + TrainedModelConfig trainedModel = TrainedModelConfigTests.createTestInstance("inferenceEntityId") .setTags(Collections.singletonList("prepackaged")) .setModelSize(1000) .setEstimatedOperations(2000) @@ -207,7 +224,14 @@ public void testCheckForExistingTaskReturnsTask() { prepareGetTrainedModelResponse(client, List.of(trainedModel)); var respListener = new PlainActionFuture(); - TransportPutTrainedModelAction.checkForExistingTask(client, 
"modelId", true, respListener, new PlainActionFuture<>(), TIMEOUT); + TransportPutTrainedModelAction.checkForExistingTask( + client, + "inferenceEntityId", + true, + respListener, + new PlainActionFuture<>(), + TIMEOUT + ); var returnedModel = respListener.actionGet(TIMEOUT); assertThat(returnedModel.getResponse().getModelId(), is(trainedModel.getModelId())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java index be8b72d26fd71..67d0e6f35bafc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java @@ -10,14 +10,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefHash; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.ml.MachineLearningTests; import org.junit.After; import org.junit.Before; @@ -27,7 +22,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -52,18 +46,6 @@ protected SearchPlugin registerPlugin() { return 
MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(CategorizeTextAggregationBuilder.NAME), - (p, c) -> ParsedCategorization.fromXContent(p, (String) c) - ) - ); - } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/87240") public void testReduceRandom() { // The bug is in the assertReduced() method immediately below that the base class testReduceRandom() calls. @@ -80,11 +62,6 @@ protected void assertReduced(InternalCategorizationAggregation reduced, List excludePathsFromXContentInsertion() { - return p -> p.contains("key"); - } - @Override protected InternalCategorizationAggregation createTestInstance( String name, @@ -109,11 +86,6 @@ protected InternalCategorizationAggregation createTestInstance( ); } - @Override - protected Class> implementationClass() { - return ParsedCategorization.class; - } - private static Map toCounts(Stream buckets) { return buckets.collect( Collectors.toMap( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/ParsedCategorization.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/ParsedCategorization.java deleted file mode 100644 index 1402f3c987ac3..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/ParsedCategorization.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.aggs.categorization; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.function.Supplier; - -// TODO: how close to the actual InternalCategorizationAggregation.Bucket class does this have to be to add any value? 
-class ParsedCategorization extends ParsedMultiBucketAggregation { - - @Override - public String getType() { - return CategorizeTextAggregationBuilder.NAME; - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedCategorization.class.getSimpleName(), - true, - ParsedCategorization::new - ); - static { - declareMultiBucketAggregationFields(PARSER, ParsedBucket::fromXContent, ParsedBucket::fromXContent); - } - - public static ParsedCategorization fromXContent(XContentParser parser, String name) throws IOException { - ParsedCategorization aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; - } - - @Override - public List getBuckets() { - return buckets; - } - - public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements MultiBucketsAggregation.Bucket { - - private InternalCategorizationAggregation.BucketKey key; - private String regex; - private int maxMatchingLength; - - protected void setKeyAsString(String keyAsString) { - if (keyAsString == null) { - key = null; - return; - } - if (keyAsString.isEmpty()) { - key = new InternalCategorizationAggregation.BucketKey(new BytesRef[0]); - return; - } - String[] split = Strings.tokenizeToStringArray(keyAsString, " "); - key = new InternalCategorizationAggregation.BucketKey( - split == null - ? 
new BytesRef[] { new BytesRef(keyAsString) } - : Arrays.stream(split).map(BytesRef::new).toArray(BytesRef[]::new) - ); - } - - private void setRegex(String regex) { - this.regex = regex; - } - - private void setMaxMatchingLength(int maxMatchingLength) { - this.maxMatchingLength = maxMatchingLength; - } - - @Override - public Object getKey() { - return key; - } - - @Override - public String getKeyAsString() { - return key.toString(); - } - - @Override - protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { - return builder.field(CommonFields.KEY.getPreferredName(), getKey()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - keyToXContent(builder); - builder.field(CategoryDefinition.REGEX.getPreferredName(), regex); - builder.field(CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName(), maxMatchingLength); - builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); - getAggregations().toXContentInternal(builder, params); - builder.endObject(); - return builder; - } - - static InternalCategorizationAggregation.BucketKey parsedKey(final XContentParser parser) throws IOException { - if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - String toSplit = parser.text(); - String[] split = Strings.tokenizeToStringArray(toSplit, " "); - return new InternalCategorizationAggregation.BucketKey( - split == null - ? 
new BytesRef[] { new BytesRef(toSplit) } - : Arrays.stream(split).map(BytesRef::new).toArray(BytesRef[]::new) - ); - } else { - return new InternalCategorizationAggregation.BucketKey( - XContentParserUtils.parseList(parser, p -> new BytesRef(p.binaryValue())).toArray(BytesRef[]::new) - ); - } - } - - protected static ParsedBucket parseCategorizationBucketXContent( - final XContentParser parser, - final Supplier bucketSupplier, - final CheckedBiConsumer keyConsumer - ) throws IOException { - final ParsedBucket bucket = bucketSupplier.get(); - XContentParser.Token token; - String currentFieldName = parser.currentName(); - - List aggregations = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) { - bucket.setKeyAsString(parser.text()); - } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) { - keyConsumer.accept(parser, bucket); - } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) { - bucket.setDocCount(parser.longValue()); - } else if (CategoryDefinition.REGEX.getPreferredName().equals(currentFieldName)) { - bucket.setRegex(parser.text()); - } else if (CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName().equals(currentFieldName)) { - bucket.setMaxMatchingLength(parser.intValue()); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) { - keyConsumer.accept(parser, bucket); - } else { - XContentParserUtils.parseTypedKeysObject( - parser, - Aggregation.TYPED_KEYS_DELIMITER, - Aggregation.class, - aggregations::add - ); - } - } - } - bucket.setAggregations(new Aggregations(aggregations)); - return bucket; - } - - static ParsedBucket fromXContent(final XContentParser parser) throws IOException { - 
return parseCategorizationBucketXContent(parser, ParsedBucket::new, (p, bucket) -> bucket.key = parsedKey(p)); - } - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java index e12baeab68f7b..73131efbbcf4b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java @@ -60,31 +60,30 @@ public void testStationaryFalsePositiveRate() throws IOException { int fp = 0; for (int i = 0; i < 100; i++) { double[] bucketValues = DoubleStream.generate(() -> 10 + normal.sample()).limit(40).toArray(); - ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-3); + ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-4); fp += test.type() == ChangePointAggregator.Type.STATIONARY ? 0 : 1; } - assertThat(fp, lessThan(5)); + assertThat(fp, lessThan(10)); fp = 0; GammaDistribution gamma = new GammaDistribution(RandomGeneratorFactory.createRandomGenerator(Randomness.get()), 1, 2); for (int i = 0; i < 100; i++) { double[] bucketValues = DoubleStream.generate(() -> gamma.sample()).limit(40).toArray(); - ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-3); + ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-4); fp += test.type() == ChangePointAggregator.Type.STATIONARY ? 
0 : 1; } - assertThat(fp, lessThan(5)); + assertThat(fp, lessThan(10)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103848") public void testSampledDistributionTestFalsePositiveRate() throws IOException { NormalDistribution normal = new NormalDistribution(RandomGeneratorFactory.createRandomGenerator(Randomness.get()), 0.0, 1.0); int fp = 0; for (int i = 0; i < 100; i++) { double[] bucketValues = DoubleStream.generate(() -> 10 + normal.sample()).limit(5000).toArray(); - ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 0.05); + ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-4); fp += test.type() == ChangePointAggregator.Type.STATIONARY ? 0 : 1; } - assertThat(fp, lessThan(5)); + assertThat(fp, lessThan(10)); } public void testNonStationaryFalsePositiveRate() throws IOException { @@ -93,23 +92,22 @@ public void testNonStationaryFalsePositiveRate() throws IOException { for (int i = 0; i < 100; i++) { AtomicInteger j = new AtomicInteger(); double[] bucketValues = DoubleStream.generate(() -> j.incrementAndGet() + normal.sample()).limit(40).toArray(); - ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-3); + ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-4); fp += test.type() == ChangePointAggregator.Type.NON_STATIONARY ? 
0 : 1; } - assertThat(fp, lessThan(5)); + assertThat(fp, lessThan(10)); fp = 0; GammaDistribution gamma = new GammaDistribution(RandomGeneratorFactory.createRandomGenerator(Randomness.get()), 1, 2); for (int i = 0; i < 100; i++) { AtomicInteger j = new AtomicInteger(); double[] bucketValues = DoubleStream.generate(() -> j.incrementAndGet() + gamma.sample()).limit(40).toArray(); - ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-3); + ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 1e-4); fp += test.type() == ChangePointAggregator.Type.NON_STATIONARY ? 0 : 1; } - assertThat(fp, lessThan(5)); + assertThat(fp, lessThan(10)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103847") public void testStepChangePower() throws IOException { NormalDistribution normal = new NormalDistribution(RandomGeneratorFactory.createRandomGenerator(Randomness.get()), 0, 2); int tp = 0; @@ -121,7 +119,7 @@ public void testStepChangePower() throws IOException { ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 0.05); tp += test.type() == ChangePointAggregator.Type.STEP_CHANGE ? 1 : 0; } - assertThat(tp, greaterThan(90)); + assertThat(tp, greaterThan(80)); tp = 0; GammaDistribution gamma = new GammaDistribution(RandomGeneratorFactory.createRandomGenerator(Randomness.get()), 1, 2); @@ -133,7 +131,7 @@ public void testStepChangePower() throws IOException { ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 0.05); tp += test.type() == ChangePointAggregator.Type.STEP_CHANGE ? 
1 : 0; } - assertThat(tp, greaterThan(90)); + assertThat(tp, greaterThan(80)); } public void testTrendChangePower() throws IOException { @@ -148,7 +146,7 @@ public void testTrendChangePower() throws IOException { ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 0.05); tp += test.type() == ChangePointAggregator.Type.TREND_CHANGE ? 1 : 0; } - assertThat(tp, greaterThan(90)); + assertThat(tp, greaterThan(80)); tp = 0; GammaDistribution gamma = new GammaDistribution(RandomGeneratorFactory.createRandomGenerator(Randomness.get()), 1, 2); @@ -161,7 +159,7 @@ public void testTrendChangePower() throws IOException { ChangePointAggregator.TestStats test = ChangePointAggregator.testForChange(bucketValues, 0.05); tp += test.type() == ChangePointAggregator.Type.TREND_CHANGE ? 1 : 0; } - assertThat(tp, greaterThan(90)); + assertThat(tp, greaterThan(80)); } public void testDistributionChangeTestPower() throws IOException { @@ -253,14 +251,18 @@ public void testConstant() throws IOException { ); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103926") public void testSlopeUp() throws IOException { NormalDistribution normal = new NormalDistribution(RandomGeneratorFactory.createRandomGenerator(Randomness.get()), 0, 2); AtomicInteger i = new AtomicInteger(); double[] bucketValues = DoubleStream.generate(() -> i.addAndGet(1) + normal.sample()).limit(40).toArray(); testChangeType(bucketValues, changeType -> { - assertThat(changeType, instanceOf(ChangeType.NonStationary.class)); - assertThat(Arrays.toString(bucketValues), ((ChangeType.NonStationary) changeType).getTrend(), equalTo("increasing")); + if (changeType instanceof ChangeType.NonStationary) { + assertThat(Arrays.toString(bucketValues), ((ChangeType.NonStationary) changeType).getTrend(), equalTo("increasing")); + } else { + // Handle infrequent false positives. 
+ assertThat(changeType, instanceOf(ChangeType.TrendChange.class)); + } + }); } @@ -269,8 +271,12 @@ public void testSlopeDown() throws IOException { AtomicInteger i = new AtomicInteger(40); double[] bucketValues = DoubleStream.generate(() -> i.decrementAndGet() + normal.sample()).limit(40).toArray(); testChangeType(bucketValues, changeType -> { - assertThat(changeType, instanceOf(ChangeType.NonStationary.class)); - assertThat(Arrays.toString(bucketValues), ((ChangeType.NonStationary) changeType).getTrend(), equalTo("decreasing")); + if (changeType instanceof ChangeType.NonStationary) { + assertThat(Arrays.toString(bucketValues), ((ChangeType.NonStationary) changeType).getTrend(), equalTo("decreasing")); + } else { + // Handle infrequent false positives. + assertThat(changeType, instanceOf(ChangeType.TrendChange.class)); + } }); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java index ccaa6b4f0f4ec..85219b0e62c6c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java @@ -13,19 +13,14 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Tuple; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import 
org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.ml.MachineLearningTests; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.mr.InternalItemSetMapReduceAggregationTests.WordCountMapReducer.WordCounts; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.mr.ItemSetMapReduceValueSource.Field; @@ -40,7 +35,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.function.Supplier; -import java.util.stream.Collectors; import java.util.stream.Stream; import static org.hamcrest.Matchers.equalTo; @@ -154,49 +148,6 @@ protected WordCounts combine(Stream partitions, WordCounts mapReduce } - static class ParsedWordCountMapReduceAggregation extends ParsedAggregation { - - private Map frequencies; - - @SuppressWarnings("unchecked") - static ParsedWordCountMapReduceAggregation fromXContent(XContentParser parser, final String name) throws IOException { - Map values = parser.map(); - Map frequencies = ((Map) values.getOrDefault( - Aggregation.CommonFields.BUCKETS.getPreferredName(), - Collections.emptyMap() - )).entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> ((Integer) e.getValue()).longValue())); - - ParsedWordCountMapReduceAggregation parsed = new ParsedWordCountMapReduceAggregation( - frequencies, - (Map) values.get(InternalAggregation.CommonFields.META.getPreferredName()) - ); - parsed.setName(name); - return parsed; - } - - ParsedWordCountMapReduceAggregation(Map frequencies, Map metadata) { - this.frequencies = frequencies; - this.metadata = metadata; - } - - @Override - public String getType() { - return WordCountMapReducer.AGG_NAME; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws 
IOException { - if (frequencies.isEmpty() == false) { - builder.field(Aggregation.CommonFields.BUCKETS.getPreferredName(), getFrequencies()); - } - return builder; - } - - public Map getFrequencies() { - return frequencies; - } - } - @Override protected InternalItemSetMapReduceAggregation createTestInstance( String name, @@ -233,18 +184,6 @@ protected void assertReduced( assertMapEquals(expectedFrequencies2, wcReduced.frequencies); } - @Override - protected void assertFromXContent( - InternalItemSetMapReduceAggregation aggregation, - ParsedAggregation parsedAggregation - ) throws IOException { - ParsedWordCountMapReduceAggregation parsed = (ParsedWordCountMapReduceAggregation) parsedAggregation; - assertThat(parsed.getName(), equalTo(aggregation.getName())); - - WordCountMapReducer.WordCounts wc = aggregation.getMapReduceResult(); - assertMapEquals(wc.frequencies, parsed.getFrequencies()); - } - @Override protected SearchPlugin registerPlugin() { return MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); @@ -268,18 +207,6 @@ protected List getNamedWriteables() { return namedWritables; } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(WordCountMapReducer.AGG_NAME), - (p, c) -> ParsedWordCountMapReduceAggregation.fromXContent(p, (String) c) - ) - ); - } - private static void assertMapEquals(Map expected, Map actual) { assertThat(expected.size(), equalTo(actual.size())); for (Entry entry : expected.entrySet()) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java index fc1858167e7d7..4045ac51fb1ce 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java 
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java @@ -8,22 +8,14 @@ package org.elasticsearch.xpack.ml.aggs.inference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InvalidAggregationPathException; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xpack.core.ml.inference.results.ClassificationFeatureImportance; import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResultsTests; -import org.elasticsearch.xpack.core.ml.inference.results.RegressionFeatureImportance; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResultsTests; -import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; import org.elasticsearch.xpack.ml.MachineLearningTests; @@ -32,7 +24,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import static org.hamcrest.Matchers.sameInstance; @@ -43,23 +34,6 @@ protected SearchPlugin registerPlugin() { return MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); } - @Override - protected List getNamedXContents() { - return 
CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(InferencePipelineAggregationBuilder.NAME), - (p, c) -> ParsedInference.fromXContent(p, (String) c) - ) - ); - } - - @Override - protected Predicate excludePathsFromXContentInsertion() { - return p -> p.contains("top_classes") || p.contains("feature_importance"); - } - @Override protected InternalInferenceAggregation createTestInstance(String name, Map metadata) { InferenceResults result; @@ -97,37 +71,6 @@ protected void assertReduced(InternalInferenceAggregation reduced, List featureImportance = regression.getFeatureImportance(); - if (featureImportance.isEmpty()) { - featureImportance = null; - } - assertEquals(featureImportance, parsed.getFeatureImportance()); - } else if (result instanceof ClassificationInferenceResults classification) { - assertEquals(classification.predictedValue(), parsed.getValue()); - - List featureImportance = classification.getFeatureImportance(); - if (featureImportance.isEmpty()) { - featureImportance = null; - } - assertEquals(featureImportance, parsed.getFeatureImportance()); - - List topClasses = classification.getTopClasses(); - if (topClasses.isEmpty()) { - topClasses = null; - } - assertEquals(topClasses, parsed.getTopClasses()); - } - } - public void testGetProperty_givenEmptyPath() { InternalInferenceAggregation internalAgg = createTestInstance(); assertThat(internalAgg, sameInstance(internalAgg.getProperty(Collections.emptyList()))); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/ParsedInference.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/ParsedInference.java deleted file mode 100644 index 9b11fc166ef58..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/ParsedInference.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.aggs.inference; - -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.inference.results.SingleValueInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry; -import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResults.PREDICTION_PROBABILITY; -import static org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResults.PREDICTION_SCORE; -import static org.elasticsearch.xpack.core.ml.inference.results.SingleValueInferenceResults.FEATURE_IMPORTANCE; - -/** - * There isn't enough information in toXContent representation of the - * {@link org.elasticsearch.inference.InferenceResults} - * objects to fully reconstruct them. In particular, depending on which - * fields are written (result value, feature importance) it is not possible to - * distinguish between a Regression result and a Classification result. - * - * This class parses the union all possible fields that may be written by - * InferenceResults. 
- * - * The warning field is mutually exclusive with all the other fields. - */ -public class ParsedInference extends ParsedAggregation { - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - ParsedInference.class.getSimpleName(), - true, - args -> new ParsedInference( - args[0], - (List>) args[1], - (List) args[2], - (String) args[3], - (Double) args[4], - (Double) args[5] - ) - ); - - static { - PARSER.declareField(optionalConstructorArg(), (p, n) -> { - Object o; - XContentParser.Token token = p.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - o = p.text(); - } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - o = p.booleanValue(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - o = p.doubleValue(); - } else { - throw new XContentParseException( - p.getTokenLocation(), - "[" - + ParsedInference.class.getSimpleName() - + "] failed to parse field [" - + CommonFields.VALUE - + "] " - + "value [" - + token - + "] is not a string, boolean or number" - ); - } - return o; - }, CommonFields.VALUE, ObjectParser.ValueType.VALUE); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> p.map(), - new ParseField(SingleValueInferenceResults.FEATURE_IMPORTANCE) - ); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> TopClassEntry.fromXContent(p), - new ParseField(ClassificationConfig.DEFAULT_TOP_CLASSES_RESULTS_FIELD) - ); - PARSER.declareString(optionalConstructorArg(), new ParseField(WarningInferenceResults.NAME)); - PARSER.declareDouble(optionalConstructorArg(), new ParseField(PREDICTION_PROBABILITY)); - PARSER.declareDouble(optionalConstructorArg(), new ParseField(PREDICTION_SCORE)); - declareAggregationFields(PARSER); - } - - public static ParsedInference fromXContent(XContentParser parser, final String name) { - ParsedInference parsed = PARSER.apply(parser, null); - parsed.setName(name); - return parsed; - } - - private final 
Object value; - private final List> featureImportance; - private final List topClasses; - private final String warning; - private final Double predictionProbability; - private final Double predictionScore; - - ParsedInference( - Object value, - List> featureImportance, - List topClasses, - String warning, - Double predictionProbability, - Double predictionScore - ) { - this.value = value; - this.warning = warning; - this.featureImportance = featureImportance; - this.topClasses = topClasses; - this.predictionProbability = predictionProbability; - this.predictionScore = predictionScore; - } - - public Object getValue() { - return value; - } - - public List> getFeatureImportance() { - return featureImportance; - } - - public List getTopClasses() { - return topClasses; - } - - public String getWarning() { - return warning; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - if (warning != null) { - builder.field(WarningInferenceResults.WARNING.getPreferredName(), warning); - } else { - builder.field(CommonFields.VALUE.getPreferredName(), value); - if (topClasses != null && topClasses.size() > 0) { - builder.field(ClassificationConfig.DEFAULT_TOP_CLASSES_RESULTS_FIELD, topClasses); - } - if (featureImportance != null && featureImportance.size() > 0) { - builder.field(FEATURE_IMPORTANCE, featureImportance); - } - if (predictionProbability != null) { - builder.field(PREDICTION_PROBABILITY, predictionProbability); - } - if (predictionScore != null) { - builder.field(PREDICTION_SCORE, predictionScore); - } - } - return builder; - } - - @Override - public String getType() { - return InferencePipelineAggregationBuilder.NAME; - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java index f064b37c1fdec..394e7c4e592b5 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java @@ -8,12 +8,8 @@ package org.elasticsearch.xpack.ml.aggs.kstest; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.ml.MachineLearningTests; import java.util.Arrays; @@ -22,8 +18,6 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.hamcrest.Matchers.equalTo; - public class InternalKSTestAggregationTests extends InternalAggregationTestCase { @Override @@ -31,18 +25,6 @@ protected SearchPlugin registerPlugin() { return MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - BucketCountKSTestAggregationBuilder.NAME, - (p, c) -> ParsedKSTest.fromXContent(p, (String) c) - ) - ); - } - @Override protected InternalKSTestAggregation createTestInstance(String name, Map metadata) { List modes = randomSubsetOf(Arrays.stream(Alternative.values()).map(Alternative::toString).collect(Collectors.toList())); @@ -63,12 +45,6 @@ public void testReduceRandom() { expectThrows(UnsupportedOperationException.class, () -> createTestInstance("name", null).reduce(null, null)); } - @Override - protected void assertFromXContent(InternalKSTestAggregation aggregation, ParsedAggregation parsedAggregation) { - ParsedKSTest ks = (ParsedKSTest) parsedAggregation; - 
assertThat(ks.getModes(), equalTo(aggregation.getModeValues())); - } - @Override protected InternalKSTestAggregation mutateInstance(InternalKSTestAggregation instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/ParsedKSTest.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/ParsedKSTest.java deleted file mode 100644 index 230265eb246ca..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/ParsedKSTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.aggs.kstest; - -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Map; - -public class ParsedKSTest extends ParsedAggregation { - - @SuppressWarnings("unchecked") - public static ParsedKSTest fromXContent(XContentParser parser, final String name) throws IOException { - Map values = parser.map(); - Map doubleValues = Maps.newMapWithExpectedSize(values.size()); - for (Alternative alternative : Alternative.values()) { - Double value = (Double) values.get(alternative.toString()); - if (value != null) { - doubleValues.put(alternative.toString(), value); - } - } - ParsedKSTest parsed = new ParsedKSTest( - doubleValues, - (Map) values.get(InternalAggregation.CommonFields.META.getPreferredName()) - ); - parsed.setName(name); - return parsed; - } - - private final Map modes; - - 
ParsedKSTest(Map modes, Map metadata) { - this.modes = modes; - this.metadata = metadata; - } - - Map getModes() { - return modes; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - for (Map.Entry kv : modes.entrySet()) { - builder.field(kv.getKey(), kv.getValue()); - } - return builder; - } - - @Override - public String getType() { - return BucketCountKSTestAggregationBuilder.NAME.getPreferredName(); - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java index d4141c61a48d4..e98c8a10b577f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.xpack.core.ml.annotations.Annotation; import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; @@ -46,6 +45,7 @@ import org.elasticsearch.xpack.ml.annotations.AnnotationPersister; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetector; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java index 9ba4604458b93..5b2cd8f78d02e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java @@ -17,15 +17,15 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term; import org.junit.Before; @@ -119,7 +119,7 @@ public void setUpTests() { } public void testExtraction() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket( 1000L, 3, @@ -189,7 +189,7 @@ public void testExtractionGivenResponseHasNullAggs() throws IOException { 
public void testExtractionGivenResponseHasEmptyAggs() throws IOException { TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); - Aggregations emptyAggs = AggregationTestUtils.createAggs(Collections.emptyList()); + InternalAggregations emptyAggs = AggregationTestUtils.createAggs(Collections.emptyList()); SearchResponse response = createSearchResponse(emptyAggs); extractor.setNextResponse(response); @@ -215,12 +215,12 @@ public void testExtractionGivenResponseHasEmptyHistogramAgg() throws IOException public void testExtractionGivenResponseHasMultipleTopLevelAggs() { TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); - Histogram histogram1 = mock(Histogram.class); + InternalHistogram histogram1 = mock(InternalHistogram.class); when(histogram1.getName()).thenReturn("hist_1"); - Histogram histogram2 = mock(Histogram.class); + InternalHistogram histogram2 = mock(InternalHistogram.class); when(histogram2.getName()).thenReturn("hist_2"); - Aggregations aggs = AggregationTestUtils.createAggs(Arrays.asList(histogram1, histogram2)); + InternalAggregations aggs = AggregationTestUtils.createAggs(Arrays.asList(histogram1, histogram2)); SearchResponse response = createSearchResponse(aggs); extractor.setNextResponse(response); @@ -240,7 +240,7 @@ public void testExtractionGivenCancelBeforeNext() { public void testExtractionGivenCancelHalfWay() throws IOException { int buckets = 1200; - List histogramBuckets = new ArrayList<>(buckets); + List histogramBuckets = new ArrayList<>(buckets); long timestamp = 1000; for (int i = 0; i < buckets; i++) { histogramBuckets.add( @@ -312,16 +312,16 @@ private AggregationDataExtractorContext createContext(long start, long end) { } @SuppressWarnings("unchecked") - private SearchResponse createSearchResponse(String histogramName, List histogramBuckets) { - Histogram histogram = mock(Histogram.class); + private SearchResponse createSearchResponse(String histogramName, List histogramBuckets) { + InternalHistogram 
histogram = mock(InternalHistogram.class); when(histogram.getName()).thenReturn(histogramName); - when((List) histogram.getBuckets()).thenReturn(histogramBuckets); + when(histogram.getBuckets()).thenReturn(histogramBuckets); - Aggregations searchAggs = AggregationTestUtils.createAggs(Collections.singletonList(histogram)); + InternalAggregations searchAggs = AggregationTestUtils.createAggs(Collections.singletonList(histogram)); return createSearchResponse(searchAggs); } - private SearchResponse createSearchResponse(Aggregations aggregations) { + private SearchResponse createSearchResponse(InternalAggregations aggregations) { SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.status()).thenReturn(RestStatus.OK); when(searchResponse.getScrollId()).thenReturn(randomAlphaOfLength(1000)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index d95f8b8f5e52a..afd3c3534f7c3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -9,19 +9,18 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import 
org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.Avg; -import org.elasticsearch.search.aggregations.metrics.GeoCentroid; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; import org.elasticsearch.search.aggregations.metrics.Max; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.Percentile; -import org.elasticsearch.search.aggregations.metrics.Percentiles; import java.util.ArrayList; import java.util.Collections; @@ -36,25 +35,25 @@ public final class AggregationTestUtils { private AggregationTestUtils() {} - static Histogram.Bucket createHistogramBucket(long timestamp, long docCount, List subAggregations) { - Histogram.Bucket bucket = mock(Histogram.Bucket.class); + static InternalHistogram.Bucket createHistogramBucket(long timestamp, long docCount, List subAggregations) { + InternalHistogram.Bucket bucket = mock(InternalHistogram.Bucket.class); when(bucket.getKey()).thenReturn(timestamp); when(bucket.getDocCount()).thenReturn(docCount); - Aggregations aggs = createAggs(subAggregations); + InternalAggregations aggs = createAggs(subAggregations); when(bucket.getAggregations()).thenReturn(aggs); return bucket; } - static CompositeAggregation.Bucket createCompositeBucket( + static InternalComposite.InternalBucket createCompositeBucket( long timestamp, 
String dateValueSource, long docCount, - List subAggregations, + List subAggregations, List> termValues ) { - CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class); + InternalComposite.InternalBucket bucket = mock(InternalComposite.InternalBucket.class); when(bucket.getDocCount()).thenReturn(docCount); - Aggregations aggs = createAggs(subAggregations); + InternalAggregations aggs = createAggs(subAggregations); when(bucket.getAggregations()).thenReturn(aggs); Map bucketKey = new HashMap<>(); bucketKey.put(dateValueSource, timestamp); @@ -65,34 +64,34 @@ static CompositeAggregation.Bucket createCompositeBucket( return bucket; } - static SingleBucketAggregation createSingleBucketAgg(String name, long docCount, List subAggregations) { - SingleBucketAggregation singleBucketAggregation = mock(SingleBucketAggregation.class); + static InternalSingleBucketAggregation createSingleBucketAgg(String name, long docCount, List subAggregations) { + InternalSingleBucketAggregation singleBucketAggregation = mock(InternalSingleBucketAggregation.class); when(singleBucketAggregation.getName()).thenReturn(name); when(singleBucketAggregation.getDocCount()).thenReturn(docCount); when(singleBucketAggregation.getAggregations()).thenReturn(createAggs(subAggregations)); return singleBucketAggregation; } - static Histogram.Bucket createHistogramBucket(long timestamp, long docCount) { + static InternalHistogram.Bucket createHistogramBucket(long timestamp, long docCount) { return createHistogramBucket(timestamp, docCount, Collections.emptyList()); } - static Aggregations createAggs(List aggsList) { - return new Aggregations(aggsList); + static InternalAggregations createAggs(List aggsList) { + return InternalAggregations.from(aggsList); } @SuppressWarnings("unchecked") - static Histogram createHistogramAggregation(String name, List histogramBuckets) { - Histogram histogram = mock(Histogram.class); - when((List) histogram.getBuckets()).thenReturn(histogramBuckets); + static 
InternalHistogram createHistogramAggregation(String name, List histogramBuckets) { + InternalHistogram histogram = mock(InternalHistogram.class); + when(histogram.getBuckets()).thenReturn(histogramBuckets); when(histogram.getName()).thenReturn(name); return histogram; } @SuppressWarnings("unchecked") - static CompositeAggregation createCompositeAggregation(String name, List buckets) { - CompositeAggregation compositeAggregation = mock(CompositeAggregation.class); - when((List) compositeAggregation.getBuckets()).thenReturn(buckets); + static InternalComposite createCompositeAggregation(String name, List buckets) { + InternalComposite compositeAggregation = mock(InternalComposite.class); + when(compositeAggregation.getBuckets()).thenReturn(buckets); when(compositeAggregation.getName()).thenReturn(name); return compositeAggregation; @@ -102,16 +101,16 @@ static Max createMax(String name, double value) { return new Max(name, value, DocValueFormat.RAW, null); } - static Avg createAvg(String name, double value) { - Avg avg = mock(Avg.class); + static InternalAvg createAvg(String name, double value) { + InternalAvg avg = mock(InternalAvg.class); when(avg.getName()).thenReturn(name); when(avg.value()).thenReturn(value); when(avg.getValue()).thenReturn(value); return avg; } - static GeoCentroid createGeoCentroid(String name, long count, double lat, double lon) { - GeoCentroid centroid = mock(GeoCentroid.class); + static InternalGeoCentroid createGeoCentroid(String name, long count, double lat, double lon) { + InternalGeoCentroid centroid = mock(InternalGeoCentroid.class); when(centroid.count()).thenReturn(count); when(centroid.getName()).thenReturn(name); GeoPoint point = count > 0 ? 
new GeoPoint(lat, lon) : null; @@ -119,23 +118,23 @@ static GeoCentroid createGeoCentroid(String name, long count, double lat, double return centroid; } - static NumericMetricsAggregation.SingleValue createSingleValue(String name, double value) { - NumericMetricsAggregation.SingleValue singleValue = mock(NumericMetricsAggregation.SingleValue.class); + static InternalNumericMetricsAggregation.SingleValue createSingleValue(String name, double value) { + InternalNumericMetricsAggregation.SingleValue singleValue = mock(InternalNumericMetricsAggregation.SingleValue.class); when(singleValue.getName()).thenReturn(name); when(singleValue.value()).thenReturn(value); return singleValue; } @SuppressWarnings("unchecked") - static Terms createTerms(String name, Term... terms) { - Terms termsAgg = mock(Terms.class); + static StringTerms createTerms(String name, Term... terms) { + StringTerms termsAgg = mock(StringTerms.class); when(termsAgg.getName()).thenReturn(name); - List buckets = new ArrayList<>(); + List buckets = new ArrayList<>(); for (Term term : terms) { StringTerms.Bucket bucket = mock(StringTerms.Bucket.class); when(bucket.getKey()).thenReturn(term.key); when(bucket.getDocCount()).thenReturn(term.count); - List numericAggs = new ArrayList<>(); + List numericAggs = new ArrayList<>(); if (term.hasBuckekAggs()) { when(bucket.getAggregations()).thenReturn(createAggs(term.bucketAggs)); } else { @@ -143,18 +142,18 @@ static Terms createTerms(String name, Term... terms) { numericAggs.add(createSingleValue(keyValue.getKey(), keyValue.getValue())); } if (numericAggs.isEmpty() == false) { - Aggregations aggs = createAggs(numericAggs); + InternalAggregations aggs = createAggs(numericAggs); when(bucket.getAggregations()).thenReturn(aggs); } } buckets.add(bucket); } - when((List) termsAgg.getBuckets()).thenReturn(buckets); + when(termsAgg.getBuckets()).thenReturn(buckets); return termsAgg; } - static Percentiles createPercentiles(String name, double... 
values) { - Percentiles percentiles = mock(Percentiles.class); + static InternalTDigestPercentiles createPercentiles(String name, double... values) { + InternalTDigestPercentiles percentiles = mock(InternalTDigestPercentiles.class); when(percentiles.getName()).thenReturn(name); List percentileList = new ArrayList<>(); for (double value : values) { @@ -168,7 +167,7 @@ static class Term { String key; long count; Map values; - List bucketAggs; + List bucketAggs; Term(String key, long count) { this(key, count, Collections.emptyMap()); @@ -184,7 +183,7 @@ static class Term { this.values = values; } - Term(String key, long count, List bucketAggs) { + Term(String key, long count, List bucketAggs) { this(key, count); this.bucketAggs = bucketAggs; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java index 47ed20ce7cbb9..a3edd63295cea 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java @@ -8,10 +8,10 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; +import 
org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Max; @@ -60,22 +60,22 @@ public void setValues() { } public void testProcessGivenMultipleDateHistogramsOrComposite() { - Aggregation nestedBucket; + InternalAggregation nestedBucket; if (randomBoolean()) { - List nestedHistogramBuckets = Arrays.asList( + List nestedHistogramBuckets = Arrays.asList( createHistogramBucket(1000L, 3, Collections.singletonList(createMax("metric1", 1200))), createHistogramBucket(2000L, 5, Collections.singletonList(createMax("metric1", 2800))) ); nestedBucket = createHistogramAggregation("buckets", nestedHistogramBuckets); } else { - List nestedCompositebuckets = Arrays.asList( + List nestedCompositebuckets = Arrays.asList( createCompositeBucket(1000L, "time", 3, Collections.singletonList(createMax("metric1", 1200)), Collections.emptyList()), createCompositeBucket(2000L, "time", 5, Collections.singletonList(createMax("metric1", 2800)), Collections.emptyList()) ); nestedBucket = createCompositeAggregation("buckets", nestedCompositebuckets); } - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 3, Arrays.asList(createMax("time", 1000L), nestedBucket)) ); @@ -93,7 +93,7 @@ public void testProcessGivenMultipleDateHistogramsOrComposite() { } public void testProcessGivenMaxTimeIsMissing() { - List histogramBuckets = Arrays.asList(createHistogramBucket(1000L, 3), createHistogramBucket(2000L, 5)); + List histogramBuckets = Arrays.asList(createHistogramBucket(1000L, 3), createHistogramBucket(2000L, 5)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -101,7 +101,7 @@ public void testProcessGivenMaxTimeIsMissing() { ); assertThat(e.getMessage(), containsString("Missing max aggregation for time_field 
[time]")); - List compositeBuckets = Arrays.asList( + List compositeBuckets = Arrays.asList( createCompositeBucket(1000L, "time", 3, Collections.emptyList(), Collections.emptyList()), createCompositeBucket(2000L, "time", 5, Collections.emptyList(), Collections.emptyList()) ); @@ -111,8 +111,8 @@ public void testProcessGivenMaxTimeIsMissing() { } public void testProcessGivenNonMaxTimeAgg() { - List aggs = Collections.singletonList(createTerms("time", new Term("a", 1), new Term("b", 2))); - List histogramBuckets = Arrays.asList( + List aggs = Collections.singletonList(createTerms("time", new Term("a", 1), new Term("b", 2))); + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 3, aggs), createHistogramBucket(2000L, 5, aggs) ); @@ -123,7 +123,7 @@ public void testProcessGivenNonMaxTimeAgg() { ); assertThat(e.getMessage(), containsString("Missing max aggregation for time_field [time]")); - List compositeBuckets = Arrays.asList( + List compositeBuckets = Arrays.asList( createCompositeBucket(1000L, "time", 3, aggs, Collections.emptyList()), createCompositeBucket(2000L, "time", 5, aggs, Collections.emptyList()) ); @@ -133,7 +133,7 @@ public void testProcessGivenNonMaxTimeAgg() { } public void testProcessGivenHistogramOnly() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 3, Collections.singletonList(createMax("timestamp", 1200))), createHistogramBucket(2000L, 5, Collections.singletonList(createMax("timestamp", 2800))) ); @@ -146,7 +146,7 @@ public void testProcessGivenHistogramOnly() throws IOException { } public void testProcessGivenHistogramOnlyAndNoDocCount() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 3, Collections.singletonList(createMax("time", 1000))), createHistogramBucket(2000L, 5, Collections.singletonList(createMax("time", 2000))) ); @@ -160,7 +160,7 @@ public void 
testProcessGivenHistogramOnlyAndNoDocCount() throws IOException { public void testProcessGivenCompositeOnly() throws IOException { compositeAggValueSource = "timestamp"; - List compositeBuckets = Arrays.asList( + List compositeBuckets = Arrays.asList( createCompositeBucket(1000L, "timestamp", 3, Collections.singletonList(createMax("timestamp", 1200)), Collections.emptyList()), createCompositeBucket(2000L, "timestamp", 5, Collections.singletonList(createMax("timestamp", 2800)), Collections.emptyList()) ); @@ -173,7 +173,7 @@ public void testProcessGivenCompositeOnly() throws IOException { } public void testProcessGivenCompositeOnlyAndNoDocCount() throws IOException { - List compositeBuckets = Arrays.asList( + List compositeBuckets = Arrays.asList( createCompositeBucket(1000L, "time", 3, Collections.singletonList(createMax("time", 1000)), Collections.emptyList()), createCompositeBucket(2000L, "time", 5, Collections.singletonList(createMax("time", 2000)), Collections.emptyList()) ); @@ -187,7 +187,7 @@ public void testProcessGivenCompositeOnlyAndNoDocCount() throws IOException { public void testProcessGivenCompositeWithDocAndTerms() throws IOException { compositeAggValueSource = "timestamp"; - List compositeBuckets = Arrays.asList( + List compositeBuckets = Arrays.asList( createCompositeBucket( 1000L, "timestamp", @@ -218,22 +218,21 @@ public void testProcessGivenCompositeWithDocAndTerms() throws IOException { } public void testProcessGivenTopLevelAggIsNotHistogram() throws IOException { - - List histogramABuckets = Arrays.asList( + List histogramABuckets = Arrays.asList( createHistogramBucket(1000L, 3, Arrays.asList(createMax("time", 1000), createSingleValue("my_value", 1.0))), createHistogramBucket(2000L, 4, Arrays.asList(createMax("time", 2000), createSingleValue("my_value", 2.0))), createHistogramBucket(3000L, 5, Arrays.asList(createMax("time", 3000), createSingleValue("my_value", 3.0))) ); - Histogram histogramA = createHistogramAggregation("buckets", 
histogramABuckets); + InternalHistogram histogramA = createHistogramAggregation("buckets", histogramABuckets); - List histogramBBuckets = Arrays.asList( + List histogramBBuckets = Arrays.asList( createHistogramBucket(1000L, 6, Arrays.asList(createMax("time", 1000), createSingleValue("my_value", 10.0))), createHistogramBucket(2000L, 7, Arrays.asList(createMax("time", 2000), createSingleValue("my_value", 20.0))), createHistogramBucket(3000L, 8, Arrays.asList(createMax("time", 3000), createSingleValue("my_value", 30.0))) ); - Histogram histogramB = createHistogramAggregation("buckets", histogramBBuckets); + InternalHistogram histogramB = createHistogramAggregation("buckets", histogramBBuckets); - Terms terms = createTerms( + StringTerms terms = createTerms( "my_field", new Term("A", 20, Collections.singletonList(histogramA)), new Term("B", 2, Collections.singletonList(histogramB)) @@ -250,7 +249,7 @@ public void testProcessGivenTopLevelAggIsNotHistogram() throws IOException { } public void testProcessGivenSingleMetricPerHistogram() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 3, Arrays.asList(createMax("time", 1000), createSingleValue("my_value", 1.0))), createHistogramBucket( 2000L, @@ -269,7 +268,7 @@ public void testProcessGivenSingleMetricPerHistogram() throws IOException { } public void testProcessGivenTermsPerHistogram() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket( 1000L, 4, @@ -301,7 +300,7 @@ public void testProcessGivenTermsPerHistogram() throws IOException { } public void testProcessGivenSingleMetricPerSingleTermsPerHistogram() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket( 1000L, 4, @@ -368,7 +367,7 @@ public void testProcessGivenMultipleSingleMetricPerSingleTermsPerHistogram() thr Map b4NumericAggs = new 
LinkedHashMap<>(); b4NumericAggs.put("my_value", 421.0); b4NumericAggs.put("my_value2", 422.0); - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket( 1000L, 4, @@ -415,10 +414,10 @@ public void testProcessGivenMultipleSingleMetricPerSingleTermsPerHistogram() thr } public void testProcessGivenUnsupportedAggregationUnderHistogram() { - Histogram.Bucket histogramBucket = createHistogramBucket(1000L, 2); - Aggregation anotherHistogram = mock(Aggregation.class); + InternalHistogram.Bucket histogramBucket = createHistogramBucket(1000L, 2); + InternalAggregation anotherHistogram = mock(InternalAggregation.class); when(anotherHistogram.getName()).thenReturn("nested-agg"); - Aggregations subAggs = createAggs(Arrays.asList(createMax("time", 1000), anotherHistogram)); + InternalAggregations subAggs = createAggs(Arrays.asList(createMax("time", 1000), anotherHistogram)); when(histogramBucket.getAggregations()).thenReturn(subAggs); IllegalArgumentException e = expectThrows( @@ -429,12 +428,12 @@ public void testProcessGivenUnsupportedAggregationUnderHistogram() { } public void testProcessGivenMultipleBucketAggregations() { - Histogram.Bucket histogramBucket = createHistogramBucket(1000L, 2); - Terms terms1 = mock(Terms.class); + InternalHistogram.Bucket histogramBucket = createHistogramBucket(1000L, 2); + StringTerms terms1 = mock(StringTerms.class); when(terms1.getName()).thenReturn("terms_1"); - Terms terms2 = mock(Terms.class); + StringTerms terms2 = mock(StringTerms.class); when(terms2.getName()).thenReturn("terms_2"); - Aggregations subAggs = createAggs(Arrays.asList(createMax("time", 1000), terms1, terms2)); + InternalAggregations subAggs = createAggs(Arrays.asList(createMax("time", 1000), terms1, terms2)); when(histogramBucket.getAggregations()).thenReturn(subAggs); IllegalArgumentException e = expectThrows( @@ -445,9 +444,9 @@ public void testProcessGivenMultipleBucketAggregations() { } public void 
testProcessGivenMixedBucketAndLeafAggregationsAtSameLevel_BucketFirst() throws IOException { - Terms terms = createTerms("terms", new Term("a", 1), new Term("b", 2)); + StringTerms terms = createTerms("terms", new Term("a", 1), new Term("b", 2)); Max maxAgg = createMax("max_value", 1200); - Histogram.Bucket histogramBucket = createHistogramBucket(1000L, 2, Arrays.asList(terms, createMax("time", 1000), maxAgg)); + InternalHistogram.Bucket histogramBucket = createHistogramBucket(1000L, 2, Arrays.asList(terms, createMax("time", 1000), maxAgg)); String json = aggToString(Sets.newHashSet("terms", "max_value"), histogramBucket); assertThat(json, equalTo(""" @@ -457,8 +456,8 @@ public void testProcessGivenMixedBucketAndLeafAggregationsAtSameLevel_BucketFirs public void testProcessGivenMixedBucketAndLeafAggregationsAtSameLevel_LeafFirst() throws IOException { Max maxAgg = createMax("max_value", 1200); - Terms terms = createTerms("terms", new Term("a", 1), new Term("b", 2)); - Histogram.Bucket histogramBucket = createHistogramBucket(1000L, 2, Arrays.asList(createMax("time", 1000), maxAgg, terms)); + StringTerms terms = createTerms("terms", new Term("a", 1), new Term("b", 2)); + InternalHistogram.Bucket histogramBucket = createHistogramBucket(1000L, 2, Arrays.asList(createMax("time", 1000), maxAgg, terms)); String json = aggToString(Sets.newHashSet("terms", "max_value"), histogramBucket); assertThat(json, equalTo(""" @@ -467,7 +466,7 @@ public void testProcessGivenMixedBucketAndLeafAggregationsAtSameLevel_LeafFirst( } public void testProcessGivenBucketAndLeafAggregationsButBucketNotInFields() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket( 1000L, 4, @@ -507,7 +506,7 @@ public void testProcessGivenBucketAndLeafAggregationsButBucketNotInFields() thro } public void testProcessGivenSinglePercentilesPerHistogram() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = 
Arrays.asList( createHistogramBucket(1000L, 4, Arrays.asList(createMax("time", 1000), createPercentiles("my_field", 1.0))), createHistogramBucket(2000L, 7, Arrays.asList(createMax("time", 2000), createPercentiles("my_field", 2.0))), createHistogramBucket( @@ -528,7 +527,7 @@ public void testProcessGivenSinglePercentilesPerHistogram() throws IOException { } public void testProcessGivenMultiplePercentilesPerHistogram() { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 4, Arrays.asList(createMax("time", 1000), createPercentiles("my_field", 1.0))), createHistogramBucket(2000L, 7, Arrays.asList(createMax("time", 2000), createPercentiles("my_field", 2.0, 5.0))), createHistogramBucket(3000L, 10, Arrays.asList(createMax("time", 3000), createPercentiles("my_field", 3.0))), @@ -556,25 +555,25 @@ public void testBucketAggContainsRequiredAgg() { when(termsAgg.getName()).thenReturn("bar"); assertFalse(processor.bucketAggContainsRequiredAgg(termsAgg)); - Terms nestedTermsAgg = mock(Terms.class); + StringTerms nestedTermsAgg = mock(StringTerms.class); when(nestedTermsAgg.getBuckets()).thenReturn(Collections.emptyList()); when(nestedTermsAgg.getName()).thenReturn("nested_terms"); StringTerms.Bucket bucket = mock(StringTerms.Bucket.class); - when(bucket.getAggregations()).thenReturn(new Aggregations(Collections.singletonList(nestedTermsAgg))); + when(bucket.getAggregations()).thenReturn(InternalAggregations.from(Collections.singletonList(nestedTermsAgg))); when((List) termsAgg.getBuckets()).thenReturn(Collections.singletonList(bucket)); assertFalse(processor.bucketAggContainsRequiredAgg(termsAgg)); Max max = mock(Max.class); when(max.getName()).thenReturn("foo"); StringTerms.Bucket nestedTermsBucket = mock(StringTerms.Bucket.class); - when(nestedTermsBucket.getAggregations()).thenReturn(new Aggregations(Collections.singletonList(max))); - when((List) 
nestedTermsAgg.getBuckets()).thenReturn(Collections.singletonList(nestedTermsBucket)); + when(nestedTermsBucket.getAggregations()).thenReturn(InternalAggregations.from(Collections.singletonList(max))); + when(nestedTermsAgg.getBuckets()).thenReturn(Collections.singletonList(nestedTermsBucket)); assertTrue(processor.bucketAggContainsRequiredAgg(termsAgg)); } public void testBucketBeforeStartIsPruned() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 4, Arrays.asList(createMax("time", 1000), createPercentiles("my_field", 1.0))), createHistogramBucket(2000L, 7, Arrays.asList(createMax("time", 2000), createPercentiles("my_field", 2.0))), createHistogramBucket(3000L, 10, Arrays.asList(createMax("time", 3000), createPercentiles("my_field", 3.0))), @@ -591,7 +590,7 @@ public void testBucketBeforeStartIsPruned() throws IOException { } public void testBucketsBeforeStartArePruned() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 4, Arrays.asList(createMax("time", 1000), createPercentiles("my_field", 1.0))), createHistogramBucket(2000L, 7, Arrays.asList(createMax("time", 2000), createPercentiles("my_field", 2.0))), createHistogramBucket(3000L, 10, Arrays.asList(createMax("time", 3000), createPercentiles("my_field", 3.0))), @@ -607,7 +606,7 @@ public void testBucketsBeforeStartArePruned() throws IOException { } public void testSingleBucketAgg() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket( 1000L, 4, @@ -636,8 +635,7 @@ public void testSingleBucketAgg() throws IOException { } public void testSingleBucketAgg_failureWithSubMultiBucket() { - - List histogramBuckets = Collections.singletonList( + List histogramBuckets = Collections.singletonList( createHistogramBucket( 1000L, 4, @@ -661,7 +659,7 @@ public void 
testSingleBucketAgg_failureWithSubMultiBucket() { } public void testGeoCentroidAgg() throws IOException { - List histogramBuckets = Arrays.asList( + List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 4, Arrays.asList(createMax("time", 1000), createGeoCentroid("geo_field", 4, 92.1, 93.1))), createHistogramBucket(2000L, 7, Arrays.asList(createMax("time", 2000), createGeoCentroid("geo_field", 0, -1, -1))) ); @@ -672,16 +670,16 @@ public void testGeoCentroidAgg() throws IOException { {"time":2000,"doc_count":7}""")); } - private String aggToString(Set fields, Histogram.Bucket bucket) throws IOException { + private String aggToString(Set fields, InternalHistogram.Bucket bucket) throws IOException { return aggToString(fields, Collections.singletonList(bucket)); } - private String aggToString(Set fields, List buckets) throws IOException { - Histogram histogram = createHistogramAggregation("buckets", buckets); + private String aggToString(Set fields, List buckets) throws IOException { + InternalHistogram histogram = createHistogramAggregation("buckets", buckets); return aggToString(fields, createAggs(Collections.singletonList(histogram))); } - private String aggToString(Set fields, Aggregations aggregations) throws IOException { + private String aggToString(Set fields, InternalAggregations aggregations) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); AggregationToJsonProcessor processor = new AggregationToJsonProcessor( @@ -697,8 +695,8 @@ private String aggToString(Set fields, Aggregations aggregations) throws return outputStream.toString(StandardCharsets.UTF_8.name()); } - private String aggToStringComposite(Set fields, List buckets) throws IOException { - CompositeAggregation compositeAggregation = createCompositeAggregation("buckets", buckets); + private String aggToStringComposite(Set fields, List buckets) throws IOException { + InternalComposite compositeAggregation = createCompositeAggregation("buckets", 
buckets); return aggToString(fields, createAggs(Collections.singletonList(compositeAggregation))); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java index 037976f9dec9b..6cc432dd4831f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java @@ -19,18 +19,18 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; +import 
org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.junit.Before; import java.io.BufferedReader; @@ -127,7 +127,7 @@ public void setUpTests() { } public void testExtraction() throws IOException { - List compositeBucket = Arrays.asList( + List compositeBucket = Arrays.asList( createCompositeBucket( 1000L, "time_bucket", @@ -208,7 +208,7 @@ public void testExtractionGivenResponseHasNullAggs() throws IOException { public void testExtractionGivenResponseHasEmptyAggs() throws IOException { TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); - Aggregations emptyAggs = AggregationTestUtils.createAggs(Collections.emptyList()); + InternalAggregations emptyAggs = AggregationTestUtils.createAggs(Collections.emptyList()); SearchResponse response = createSearchResponse(emptyAggs); extractor.setNextResponse(response); @@ -231,7 +231,7 @@ public void testExtractionGivenCancelBeforeNext() { public void testExtractionCancelOnFirstPage() throws IOException { int numBuckets = 10; - List buckets = new ArrayList<>(numBuckets); + List buckets = new ArrayList<>(numBuckets); long timestamp = 1000; for (int i = 0; i < numBuckets; i++) { buckets.add( @@ -260,7 +260,7 @@ public void testExtractionCancelOnFirstPage() throws IOException { public void testExtractionGivenCancelHalfWay() throws IOException { int numBuckets = 10; - List buckets = new ArrayList<>(numBuckets); + List buckets = new ArrayList<>(numBuckets); long timestamp = 1000; for (int i = 0; i < numBuckets; i++) { buckets.add( @@ -345,17 +345,21 @@ private CompositeAggregationDataExtractorContext createContext(long start, long } @SuppressWarnings("unchecked") - private SearchResponse createSearchResponse(String aggName, List buckets, Map afterKey) { - CompositeAggregation compositeAggregation = mock(CompositeAggregation.class); + private SearchResponse createSearchResponse( + String aggName, + List buckets, + Map afterKey + ) { + InternalComposite compositeAggregation = 
mock(InternalComposite.class); when(compositeAggregation.getName()).thenReturn(aggName); when(compositeAggregation.afterKey()).thenReturn(afterKey); - when((List) compositeAggregation.getBuckets()).thenReturn(buckets); + when(compositeAggregation.getBuckets()).thenReturn(buckets); - Aggregations searchAggs = AggregationTestUtils.createAggs(Collections.singletonList(compositeAggregation)); + InternalAggregations searchAggs = AggregationTestUtils.createAggs(Collections.singletonList(compositeAggregation)); return createSearchResponse(searchAggs); } - private SearchResponse createSearchResponse(Aggregations aggregations) { + private SearchResponse createSearchResponse(InternalAggregations aggregations) { SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.status()).thenReturn(RestStatus.OK); when(searchResponse.getScrollId()).thenReturn(randomAlphaOfLength(1000)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java index 12ce45a186d62..7c8d2572461d4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java @@ -19,16 +19,16 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.test.ESTestCase; 
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.junit.Before; import org.mockito.Mockito; @@ -431,12 +431,12 @@ public void testExtractionGivenAutoChunkAndIntermediateEmptySearchShouldReconfig InputStream inputStream1 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(100_000L, 200_000L), inputStream1); - when(dataExtractorFactory.newExtractor(100000L, 200000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(100_000L, 200_000L), inputStream1); + when(dataExtractorFactory.newExtractor(100000L, 200000L)).thenReturn(subExtractor1); // This one is empty - DataExtractor subExtactor2 = new StubSubExtractor(new SearchInterval(200_000L, 300_000L)); - when(dataExtractorFactory.newExtractor(200000, 300000L)).thenReturn(subExtactor2); + DataExtractor subExtractor2 = new StubSubExtractor(new SearchInterval(200_000L, 300_000L)); + when(dataExtractorFactory.newExtractor(200000, 300000L)).thenReturn(subExtractor2); assertThat(extractor.hasNext(), is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -447,8 +447,8 @@ public void testExtractionGivenAutoChunkAndIntermediateEmptySearchShouldReconfig // This is the last one InputStream inputStream2 = mock(InputStream.class); - DataExtractor subExtactor3 = new StubSubExtractor(new SearchInterval(200_000L, 400_000L), inputStream2); - when(dataExtractorFactory.newExtractor(200000, 400000)).thenReturn(subExtactor3); + DataExtractor subExtractor3 
= new StubSubExtractor(new SearchInterval(200_000L, 400_000L), inputStream2); + when(dataExtractorFactory.newExtractor(200000, 400000)).thenReturn(subExtractor3); assertEquals(inputStream2, extractor.next().data().get()); assertThat(extractor.next().data().isPresent(), is(false)); @@ -464,7 +464,7 @@ public void testExtractionGivenAutoChunkAndIntermediateEmptySearchShouldReconfig String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); assertThat(searchRequest, containsString("\"gte\":100000,\"lt\":400000")); searchRequest = capturedSearchRequests.get(1).toString().replaceAll("\\s", ""); - assertThat(searchRequest, containsString("\"gte\":200000,\"lt\":400000")); + assertThat(searchRequest, containsString("\"gte\":300000,\"lt\":400000")); } public void testCancelGivenNextWasNeverCalled() { @@ -555,10 +555,11 @@ private SearchResponse createSearchResponse(long totalHits, long earliestTime, l SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.status()).thenReturn(RestStatus.OK); SearchHit[] hits = new SearchHit[(int) totalHits]; - SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1); + Arrays.fill(hits, SearchHit.unpooled(1)); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1); when(searchResponse.getHits()).thenReturn(searchHits); - List aggs = new ArrayList<>(); + List aggs = new ArrayList<>(); Min min = mock(Min.class); when(min.value()).thenReturn((double) earliestTime); when(min.getName()).thenReturn("earliest_time"); @@ -567,8 +568,7 @@ private SearchResponse createSearchResponse(long totalHits, long earliestTime, l when(max.value()).thenReturn((double) latestTime); when(max.getName()).thenReturn("latest_time"); aggs.add(max); - Aggregations aggregations = new Aggregations(aggs) { - }; + InternalAggregations aggregations = InternalAggregations.from(aggs); 
when(searchResponse.getAggregations()).thenReturn(aggregations); return searchResponse; } @@ -579,7 +579,7 @@ private SearchResponse createNullSearchResponse() { SearchHits searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1); when(searchResponse.getHits()).thenReturn(searchHits); - List aggs = new ArrayList<>(); + List aggs = new ArrayList<>(); Min min = mock(Min.class); when(min.value()).thenReturn(Double.POSITIVE_INFINITY); when(min.getName()).thenReturn("earliest_time"); @@ -588,8 +588,7 @@ private SearchResponse createNullSearchResponse() { when(max.value()).thenReturn(Double.POSITIVE_INFINITY); when(max.getName()).thenReturn("latest_time"); aggs.add(max); - Aggregations aggregations = new Aggregations(aggs) { - }; + InternalAggregations aggregations = InternalAggregations.from(aggs); when(searchResponse.getAggregations()).thenReturn(aggregations); return searchResponse; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java index bf7aa465ee604..f3eab09b7bc2e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java @@ -36,9 +36,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import 
org.elasticsearch.xpack.ml.extractor.DocValueField; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.TimeField; @@ -547,7 +547,8 @@ private SearchResponse createSearchResponse(List timestamps, List hits.add(hit); } SearchHits searchHits = new SearchHits(hits.toArray(SearchHits.EMPTY), new TotalHits(hits.size(), TotalHits.Relation.EQUAL_TO), 1); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); when(searchResponse.getTook()).thenReturn(TimeValue.timeValueMillis(randomNonNegativeLong())); return searchResponse; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java index 63afc4ef6659c..8d8cded819e23 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java @@ -243,10 +243,12 @@ public void testPersistProgress_ProgressDocumentCreated() throws IOException { } public void testPersistProgress_ProgressDocumentUpdated() throws IOException { - testPersistProgress( - new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), - ".ml-state-dummy" - ); + var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + try { + testPersistProgress(hits, ".ml-state-dummy"); + } finally { + hits.decRef(); + } } public void testSetFailed() throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 
7bc3d507ecf22..993e00bd4adf4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -654,7 +654,8 @@ private SearchResponse createSearchResponse(List field1Values, List buildSearchHits(List> vals) { - return vals.stream() - .map(InferenceRunnerTests::fromMap) - .map(reference -> SearchHit.createFromMap(Collections.singletonMap("_source", reference))) - .collect(Collectors.toCollection(ArrayDeque::new)); + return vals.stream().map(InferenceRunnerTests::fromMap).map(reference -> { + var pooled = SearchHit.createFromMap(Collections.singletonMap("_source", reference)); + try { + return pooled.asUnpooled(); + } finally { + pooled.decRef(); + } + }).collect(Collectors.toCollection(ArrayDeque::new)); } private static BytesReference fromMap(Map map) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java index d9176b74d2d3f..c308f95d483a5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.dataframe.process; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.license.License; import org.elasticsearch.test.ESTestCase; @@ -102,7 +102,7 @@ public void testPersistAllDocs() { 
}).when(trainedModelProvider).storeTrainedModelMetadata(any(TrainedModelMetadata.class), any(ActionListener.class)); doAnswer(invocationOnMock -> { - ActionListener storeListener = (ActionListener) invocationOnMock.getArguments()[0]; + ActionListener storeListener = (ActionListener) invocationOnMock.getArguments()[0]; storeListener.onResponse(null); return null; }).when(trainedModelProvider).refreshInferenceIndex(any(ActionListener.class)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java index 99dfd9e919a6a..3a95a3bb65f10 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java @@ -309,7 +309,7 @@ private void givenDataFrameBatches(List> batche } private static SearchHit newHit(String json) { - SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10)); + SearchHit hit = SearchHit.unpooled(randomInt(), randomAlphaOfLength(10)); hit.sourceRef(new BytesArray(json)); return hit; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java index 79044a465442b..f52d05fc3220d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java @@ -9,14 +9,19 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; @@ -25,48 +30,36 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; -import static org.elasticsearch.search.rank.RankBuilder.WINDOW_SIZE_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfigTests.randomLearningToRankConfig; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class LearningToRankRescorerBuilderSerializationTests extends AbstractBWCSerializationTestCase { private static LearningToRankService learningToRankService = mock(LearningToRankService.class); - @Override - protected LearningToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { - String fieldName = null; - LearningToRankRescorerBuilder rescorer = null; - Integer windowSize = null; - XContentParser.Token token = parser.nextToken(); - assert token == XContentParser.Token.START_OBJECT; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = 
parser.currentName(); - } else if (token.isValue()) { - if (WINDOW_SIZE_FIELD.match(fieldName, parser.getDeprecationHandler())) { - windowSize = parser.intValue(); - } else { - throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); + public void testRequiredWindowSize() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { + LearningToRankRescorerBuilder testInstance = createTestInstance(); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.startObject(); + testInstance.doXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + + try (XContentParser parser = JsonXContent.jsonXContent.createParser(parserConfig(), Strings.toString(builder))) { + ParsingException e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser, (r) -> {})); + assertThat(e.getMessage(), equalTo("window_size is required for rescorer of type [learning_to_rank]")); } - } else if (token == XContentParser.Token.START_OBJECT) { - rescorer = LearningToRankRescorerBuilder.fromXContent(parser, learningToRankService); - } else { - throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); } } - if (rescorer == null) { - throw new ParsingException(parser.getTokenLocation(), "missing rescore type"); - } - if (windowSize != null) { - rescorer.windowSize(windowSize); - } - return rescorer; + } + + @Override + protected LearningToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { + return (LearningToRankRescorerBuilder) RescorerBuilder.parseFromXContent(parser, (r) -> {}); } @Override @@ -85,76 +78,49 @@ protected LearningToRankRescorerBuilder createTestInstance() { learningToRankService ); - if (randomBoolean()) { - builder.windowSize(randomIntBetween(1, 10000)); - } + builder.windowSize(randomIntBetween(1, 10000)); return builder; } @Override protected LearningToRankRescorerBuilder 
createXContextTestInstance(XContentType xContentType) { - return new LearningToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? randomParams() : null, learningToRankService); + return new LearningToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? randomParams() : null, learningToRankService) + .windowSize(randomIntBetween(1, 10000)); } @Override protected LearningToRankRescorerBuilder mutateInstance(LearningToRankRescorerBuilder instance) throws IOException { - int i = randomInt(4); return switch (i) { - case 0 -> { - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - randomValueOtherThan(instance.modelId(), () -> randomAlphaOfLength(10)), - instance.params(), - learningToRankService - ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize()); - } - yield builder; - } + case 0 -> new LearningToRankRescorerBuilder( + randomValueOtherThan(instance.modelId(), () -> randomAlphaOfLength(10)), + instance.params(), + learningToRankService + ).windowSize(instance.windowSize()); case 1 -> new LearningToRankRescorerBuilder(instance.modelId(), instance.params(), learningToRankService).windowSize( randomValueOtherThan(instance.windowSize(), () -> randomIntBetween(1, 10000)) ); - case 2 -> { - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - instance.modelId(), - randomValueOtherThan(instance.params(), () -> (randomBoolean() ? randomParams() : null)), - learningToRankService - ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize() + 1); - } - yield builder; - } + case 2 -> new LearningToRankRescorerBuilder( + instance.modelId(), + randomValueOtherThan(instance.params(), () -> (randomBoolean() ? 
randomParams() : null)), + learningToRankService + ).windowSize(instance.windowSize()); case 3 -> { LearningToRankConfig learningToRankConfig = randomValueOtherThan( instance.learningToRankConfig(), () -> randomLearningToRankConfig() ); - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - instance.modelId(), - learningToRankConfig, - null, - learningToRankService + yield new LearningToRankRescorerBuilder(instance.modelId(), learningToRankConfig, null, learningToRankService).windowSize( + instance.windowSize() ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize()); - } - yield builder; - } - case 4 -> { - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - mock(LocalModel.class), - instance.learningToRankConfig(), - instance.params(), - learningToRankService - ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize()); - } - yield builder; } + case 4 -> new LearningToRankRescorerBuilder( + mock(LocalModel.class), + instance.learningToRankConfig(), + instance.params(), + learningToRankService + ).windowSize(instance.windowSize()); default -> throw new AssertionError("Unexpected random test case"); }; } @@ -169,31 +135,38 @@ protected NamedXContentRegistry xContentRegistry() { List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); - namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); + namedXContent.add( + new NamedXContentRegistry.Entry( + RescorerBuilder.class, + LearningToRankRescorerBuilder.NAME, + (p, c) -> LearningToRankRescorerBuilder.fromXContent(p, learningToRankService) + ) + ); return new NamedXContentRegistry(namedXContent); } + @Override + protected 
NamedWriteableRegistry getNamedWriteableRegistry() { + return writableRegistry(); + } + @Override protected NamedWriteableRegistry writableRegistry() { List namedWriteables = new ArrayList<>(new MlInferenceNamedXContentProvider().getNamedWriteables()); namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); - namedWriteables.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables()); + namedWriteables.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + RescorerBuilder.class, + LearningToRankRescorerBuilder.NAME.getPreferredName(), + in -> new LearningToRankRescorerBuilder(in, learningToRankService) + ) + ); return new NamedWriteableRegistry(namedWriteables); } - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return writableRegistry(); - } - private static Map randomParams() { return randomMap(1, randomIntBetween(1, 10), () -> new Tuple<>(randomIdentifier(), randomIdentifier())); } - - private static LocalModel localModelMock() { - LocalModel model = mock(LocalModel.class); - String modelId = randomIdentifier(); - when(model.getModelId()).thenReturn(modelId); - return model; - } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java index 39d0af9041d03..026dcca4bfcf7 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java @@ -50,7 +50,7 @@ import static org.mockito.Mockito.verify; public class LearningToRankServiceTests extends ESTestCase { - public static final String GOOD_MODEL = "modelId"; + public static final String GOOD_MODEL = "inferenceEntityId"; public static 
final String BAD_MODEL = "badModel"; public static final TrainedModelConfig GOOD_MODEL_CONFIG = TrainedModelConfig.builder() .setModelId(GOOD_MODEL) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java index ce9d7a9d3640b..d8f1a1fd7433d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java @@ -47,7 +47,7 @@ public void testBuildRequest() throws IOException { ZeroShotClassificationProcessor processor = new ZeroShotClassificationProcessor(tokenizer, config); NlpTask.Request request = processor.getRequestBuilder( - (NlpConfig) new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build().apply(config) + (NlpConfig) config.apply(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build()) ).buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE, -1); Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java index 24b575b8f89be..94e0c533ef5fc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java @@ -349,7 +349,7 @@ public void testGetDefinitionFromDocs() { public void testStoreTrainedModelConfigCallsClientExecuteWithOperationCreate() { try (var 
threadPool = createThreadPool()) { final var client = createMockClient(threadPool); - var config = TrainedModelConfigTests.createTestInstance("modelId").build(); + var config = TrainedModelConfigTests.createTestInstance("inferenceEntityId").build(); var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); @@ -361,7 +361,7 @@ public void testStoreTrainedModelConfigCallsClientExecuteWithOperationCreate() { public void testStoreTrainedModelConfigCallsClientExecuteWithOperationCreateWhenAllowOverwriteIsFalse() { try (var threadPool = createThreadPool()) { final var client = createMockClient(threadPool); - var config = TrainedModelConfigTests.createTestInstance("modelId").build(); + var config = TrainedModelConfigTests.createTestInstance("inferenceEntityId").build(); var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); @@ -373,7 +373,7 @@ public void testStoreTrainedModelConfigCallsClientExecuteWithOperationCreateWhen public void testStoreTrainedModelConfigCallsClientExecuteWithOperationIndex() { try (var threadPool = createThreadPool()) { final var client = createMockClient(threadPool); - var config = TrainedModelConfigTests.createTestInstance("modelId").build(); + var config = TrainedModelConfigTests.createTestInstance("inferenceEntityId").build(); var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); @@ -385,7 +385,7 @@ public void testStoreTrainedModelConfigCallsClientExecuteWithOperationIndex() { public void testStoreTrainedModelWithDefinitionCallsClientExecuteWithOperationCreate() throws IOException { try (var threadPool = createThreadPool()) { final var client = createMockClient(threadPool); - var config = createTrainedModelConfigWithDefinition("modelId"); + var config = createTrainedModelConfigWithDefinition("inferenceEntityId"); var trainedModelProvider = new 
TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); @@ -397,7 +397,7 @@ public void testStoreTrainedModelWithDefinitionCallsClientExecuteWithOperationCr public void testStoreTrainedModelWithDefinitionCallsClientExecuteWithOperationCreateWhenAllowOverwriteIsFalse() throws IOException { try (var threadPool = createThreadPool()) { final var client = createMockClient(threadPool); - var config = createTrainedModelConfigWithDefinition("modelId"); + var config = createTrainedModelConfigWithDefinition("inferenceEntityId"); var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); @@ -409,7 +409,7 @@ public void testStoreTrainedModelWithDefinitionCallsClientExecuteWithOperationCr public void testStoreTrainedModelWithDefinitionCallsClientExecuteWithOperationIndex() throws IOException { try (var threadPool = createThreadPool()) { final var client = createMockClient(threadPool); - var config = createTrainedModelConfigWithDefinition("modelId"); + var config = createTrainedModelConfigWithDefinition("inferenceEntityId"); var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); @@ -461,7 +461,7 @@ public void testStoreTrainedModelVocabularyCallsClientExecuteWithOperationCreate var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); - trainedModelProvider.storeTrainedModelVocabulary("modelId", mock(VocabularyConfig.class), vocab, future); + trainedModelProvider.storeTrainedModelVocabulary("inferenceEntityId", mock(VocabularyConfig.class), vocab, future); assertThatIndexRequestHasOperation(client, DocWriteRequest.OpType.CREATE); } } @@ -473,7 +473,7 @@ public void testStoreTrainedModelVocabularyCallsClientExecuteWithOperationCreate var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); - 
trainedModelProvider.storeTrainedModelVocabulary("modelId", mock(VocabularyConfig.class), vocab, future, false); + trainedModelProvider.storeTrainedModelVocabulary("inferenceEntityId", mock(VocabularyConfig.class), vocab, future, false); assertThatIndexRequestHasOperation(client, DocWriteRequest.OpType.CREATE); } } @@ -485,7 +485,7 @@ public void testStoreTrainedModelVocabularyCallsClientExecuteWithOperationIndex( var trainedModelProvider = new TrainedModelProvider(client, xContentRegistry()); var future = new PlainActionFuture(); - trainedModelProvider.storeTrainedModelVocabulary("modelId", mock(VocabularyConfig.class), vocab, future, true); + trainedModelProvider.storeTrainedModelVocabulary("inferenceEntityId", mock(VocabularyConfig.class), vocab, future, true); assertThatIndexRequestHasOperation(client, DocWriteRequest.OpType.INDEX); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index baae42b99640f..db81fc2db3348 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -358,10 +358,12 @@ public void testPersistQuantilesSync_QuantilesDocumentCreated() { } public void testPersistQuantilesSync_QuantilesDocumentUpdated() { - testPersistQuantilesSync( - new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), - ".ml-state-dummy" - ); + var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + try { + testPersistQuantilesSync(hits, ".ml-state-dummy"); + } finally { + hits.decRef(); + } } @SuppressWarnings("unchecked") @@ -397,10 +399,12 @@ public void testPersistQuantilesAsync_QuantilesDocumentCreated() { 
} public void testPersistQuantilesAsync_QuantilesDocumentUpdated() { - testPersistQuantilesAsync( - new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), - ".ml-state-dummy" - ); + var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + try { + testPersistQuantilesAsync(hits, ".ml-state-dummy"); + } finally { + hits.decRef(); + } } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index 8179a97955a57..3dcbbeb3fcce5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -928,7 +928,8 @@ private static SearchResponse createSearchResponse(List> sou list.add(hit); } SearchHits hits = new SearchHits(list.toArray(SearchHits.EMPTY), new TotalHits(source.size(), TotalHits.Relation.EQUAL_TO), 1); - when(response.getHits()).thenReturn(hits); + when(response.getHits()).thenReturn(hits.asUnpooled()); + hits.decRef(); return response; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java index 59a79def9bd10..33e5582ec992a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java @@ -137,7 +137,8 @@ public MockClientBuilder prepareSearch(String indexName, List do SearchResponse response = mock(SearchResponse.class); SearchHits searchHits = new SearchHits(hits, new 
TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0.0f); - when(response.getHits()).thenReturn(searchHits); + when(response.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); doAnswer(new Answer() { @Override @@ -176,7 +177,8 @@ public MockClientBuilder prepareSearchFields(String indexName, List() { @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java index 90280bc08de17..47f7d8c65a27a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java @@ -107,7 +107,8 @@ private static SearchResponse createSearchResponse(List> sou hits[i++] = hit; } SearchHits searchHits = new SearchHits(hits, null, (float) 0.0); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java index 3048a1144ac55..6ec43ca2a3201 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java @@ -97,7 +97,8 @@ static SearchResponse createSearchResponseFromHits(List hits) { 1.0f ); SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); return searchResponse; } @@ -111,7 +112,8 @@ 
private static SearchResponse createSearchResponse(List to } SearchHits hits = new SearchHits(hitsArray, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1.0f); SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.getHits()).thenReturn(hits); + when(searchResponse.getHits()).thenReturn(hits.asUnpooled()); + hits.decRef(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java index 520efd5e77244..a7ba148584637 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java @@ -124,7 +124,7 @@ public void testStateRead_StateDocumentCreated() throws IOException { public void testStateRead_StateDocumentUpdated() throws IOException { testStateRead( - new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), ".ml-state-dummy" ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java index f2affbe6d2869..59a3b86ef0bd5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java @@ -22,7 +22,7 @@ public class SearchHitBuilder { private final SearchHit hit; public SearchHitBuilder(int docId) { - hit = new SearchHit(docId, null); + hit = SearchHit.unpooled(docId, null); } public SearchHitBuilder addField(String name, Object value) { diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java index 719a9be43080f..ffc00f563161b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.utils; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -67,18 +68,18 @@ public void testGetExistingTaskInfoCallsOnFailureForAnError() { var listener = new PlainActionFuture(); - getDownloadTaskInfo(client, "modelId", false, listener, TIMEOUT); + getDownloadTaskInfo(client, "inferenceEntityId", false, TIMEOUT, () -> "", listener); var exception = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(exception.status(), is(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat(exception.getMessage(), is("Unable to retrieve task information for model id [modelId]")); + assertThat(exception.getMessage(), is("Unable to retrieve task information for model id [inferenceEntityId]")); } public void testGetExistingTaskInfoCallsListenerWithNullWhenNoTasksExist() { var client = mockClientWithTasksResponse(Collections.emptyList(), threadPool); var listener = new PlainActionFuture(); - getDownloadTaskInfo(client, "modelId", false, listener, TIMEOUT); + getDownloadTaskInfo(client, "inferenceEntityId", false, TIMEOUT, () -> "", listener); assertThat(listener.actionGet(TIMEOUT), nullValue()); } @@ -88,7 +89,7 @@ public void testGetExistingTaskInfoCallsListenerWithTaskInfoWhenTaskExists() { var client = 
mockClientWithTasksResponse(listTaskInfo, threadPool); var listener = new PlainActionFuture(); - getDownloadTaskInfo(client, "modelId", false, listener, TIMEOUT); + getDownloadTaskInfo(client, "inferenceEntityId", false, TIMEOUT, () -> "", listener); assertThat(listener.actionGet(TIMEOUT), is(listTaskInfo.get(0))); } @@ -98,11 +99,37 @@ public void testGetExistingTaskInfoCallsListenerWithFirstTaskInfoWhenMultipleTas var client = mockClientWithTasksResponse(listTaskInfo, threadPool); var listener = new PlainActionFuture(); - getDownloadTaskInfo(client, "modelId", false, listener, TIMEOUT); + getDownloadTaskInfo(client, "inferenceEntityId", false, TIMEOUT, () -> "", listener); assertThat(listener.actionGet(TIMEOUT), is(listTaskInfo.get(0))); } + public void testGetTimeoutOnWaitForCompletion() { + var client = mockListTasksClient(threadPool); + + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[2]; + actionListener.onResponse( + new ListTasksResponse( + List.of(), + List.of(), + List.of(new ElasticsearchStatusException("node timeout", RestStatus.REQUEST_TIMEOUT)) + ) + ); + + return Void.TYPE; + }).when(client).execute(same(TransportListTasksAction.TYPE), any(), any()); + + var listener = new PlainActionFuture(); + + getDownloadTaskInfo(client, "inferenceEntityId", true, TIMEOUT, () -> "Testing timeout", listener); + + var exception = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(exception.status(), is(RestStatus.REQUEST_TIMEOUT)); + assertThat(exception.getMessage(), is("Testing timeout")); + } + /** * A helper method for setting up a mock cluster client to return the passed in list of tasks. 
* diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java index 939ccde7df6c4..015614e56c02b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java @@ -27,7 +27,7 @@ public class TransportVersionUtilsTests extends ESTestCase { "Bertram", new CompatibilityVersions(TransportVersions.V_7_0_1, Map.of()), "Charles", - new CompatibilityVersions(TransportVersions.V_8_500_020, Map.of()), + new CompatibilityVersions(TransportVersions.V_8_9_X, Map.of()), "Dominic", new CompatibilityVersions(TransportVersions.V_8_0_0, Map.of()) ); @@ -79,6 +79,6 @@ public void testIsMinTransformVersionSameAsCurrent() { public void testIsMinTransportVersionOnOrAfter() { assertThat(TransportVersionUtils.isMinTransportVersionOnOrAfter(state, TransportVersions.V_7_0_0), equalTo(true)); - assertThat(TransportVersionUtils.isMinTransportVersionOnOrAfter(state, TransportVersions.V_8_500_020), equalTo(false)); + assertThat(TransportVersionUtils.isMinTransportVersionOnOrAfter(state, TransportVersions.V_8_9_X), equalTo(false)); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java index 4f1308e9295c2..4fded8ef8d05d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java @@ -172,7 +172,8 @@ protected SearchResponse createSearchResponseWithHits(String... 
hits) { SearchHits searchHits = createHits(hits); SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.getScrollId()).thenReturn(SCROLL_ID); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); return searchResponse; } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index bab8e5b22c37a..92d46e54ea1cc 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -183,6 +184,7 @@ public Collection createComponents(PluginServices services) { @Override public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index eaec54ca9c1a3..753700a7ec913 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -77,7 
+77,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields. */ - public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 13; + public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 14; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/LocalStateMonitoring.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/LocalStateMonitoring.java index 1f83dc6fc0309..e9e8a195656fd 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/LocalStateMonitoring.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/LocalStateMonitoring.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.TransportAction; @@ -28,6 +29,7 @@ import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.ccr.AutoFollowStats; import 
org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; @@ -57,7 +59,7 @@ public MonitoringTransportXPackUsageAction( } @Override - protected List usageActions() { + protected List> usageActions() { return Collections.singletonList(XPackUsageFeatureAction.MONITORING); } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java index 44859b73ffb2e..c8aae302e357b 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java @@ -41,7 +41,6 @@ public void cleanup() throws Exception { wipeMonitoringIndices(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/96374") public void testMultipleNodes() throws Exception { int nodes = 0; diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java index 5cd21eadb45ff..afeec7dd52b17 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java @@ -47,6 +47,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.common.xcontent.XContentHelper.convertToJson; import static org.elasticsearch.common.xcontent.XContentHelper.stripWhitespace; import static org.elasticsearch.xpack.core.ilm.CheckShrinkReadyStepTests.randomUnassignedInfo; @@ -464,7 +465,7 @@ private static 
IndexRoutingTable mockIndexRoutingTable( state = ShardRoutingState.UNASSIGNED; } - shard.addShard(TestShardRouting.newShardRouting(shardId, nodeId, null, true, state, unassignedInfo)); + shard.addShard(shardRoutingBuilder(shardId, nodeId, true, state).withUnassignedInfo(unassignedInfo).build()); // mark all as unassigned for (int j = 0; j < replicas; ++j) { diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java index 0e752a02b5ee4..c7f00063161e1 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.Metadata; @@ -24,7 +25,6 @@ import org.elasticsearch.license.PostStartTrialResponse; import org.elasticsearch.license.TransportDeleteLicenseAction; import org.elasticsearch.protocol.xpack.XPackUsageRequest; -import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.snapshots.SnapshotRestoreException; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; @@ -57,7 +57,7 @@ public void testFeatureUsage() throws Exception { } public void testFailRestoreOnInvalidLicense() throws Exception 
{ - assertAcked(client().execute(TransportDeleteLicenseAction.TYPE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(TransportDeleteLicenseAction.TYPE, new AcknowledgedRequest.Plain()).get()); assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); ensureClusterSizeConsistency(); @@ -93,7 +93,7 @@ public void testShardAllocationOnInvalidLicense() throws Exception { assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); ensureGreen(indexName); - assertAcked(client().execute(TransportDeleteLicenseAction.TYPE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(TransportDeleteLicenseAction.TYPE, new AcknowledgedRequest.Plain()).get()); assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); ensureClusterSizeConsistency(); diff --git a/x-pack/plugin/profiling/build.gradle b/x-pack/plugin/profiling/build.gradle index 8275bfe633c91..3790fb6e762aa 100644 --- a/x-pack/plugin/profiling/build.gradle +++ b/x-pack/plugin/profiling/build.gradle @@ -24,3 +24,7 @@ dependencies { testImplementation project(path: xpackModule('ilm')) testImplementation project(':modules:data-streams') } +tasks.named("dependencyLicenses").configure { + ignoreFile 'cloudcarbonfootprint-LICENSE.txt' + ignoreFile 'cloudcarbonfootprint-NOTICE.txt' +} diff --git a/x-pack/plugin/profiling/licenses/cloudcarbonfootprint-LICENSE.txt b/x-pack/plugin/profiling/licenses/cloudcarbonfootprint-LICENSE.txt new file mode 100644 index 0000000000000..b46d5a742382d --- /dev/null +++ b/x-pack/plugin/profiling/licenses/cloudcarbonfootprint-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Thoughtworks Inc + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/profiling/licenses/cloudcarbonfootprint-NOTICE.txt b/x-pack/plugin/profiling/licenses/cloudcarbonfootprint-NOTICE.txt new file mode 100644 index 0000000000000..44eaf62f3b2c9 --- /dev/null +++ b/x-pack/plugin/profiling/licenses/cloudcarbonfootprint-NOTICE.txt @@ -0,0 +1,6 @@ +For CO2 calculations, we include and use data from +https://github.com/PaoloFrigo/cloud-carbon-footprint . + +License: Apache 2.0 + +Copyright: Cloud Carbon Footprint, (C) 2021 Thoughtworks, Inc. 
diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java index fa28877f5b4c1..ef5198499ff09 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java @@ -10,7 +10,6 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.logging.log4j.LogManager; -import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -49,7 +48,6 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103809") public class CancellationIT extends ProfilingTestCase { @Override protected Collection> nodePlugins() { diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java index e0e4ef2a12985..8553574d39646 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java @@ -7,9 +7,6 @@ package org.elasticsearch.xpack.profiling; -import org.apache.lucene.tests.util.LuceneTestCase; - -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103809") public class GetFlameGraphActionIT extends ProfilingTestCase { public void 
testGetStackTracesUnfiltered() throws Exception { GetStackTracesRequest request = new GetStackTracesRequest(1000, 600.0d, 1.0d, null, null, null, null, null, null, null, null); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 9c60a6bcdfc1c..8ad68ca7ceebc 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -7,13 +7,14 @@ package org.elasticsearch.xpack.profiling; -import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.shard.ShardId; import java.util.List; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103809") public class GetStackTracesActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { GetStackTracesRequest request = new GetStackTracesRequest(1000, 600.0d, 1.0d, null, null, null, null, null, null, null, null); @@ -43,8 +44,10 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals("vmlinux", response.getExecutables().get("lHp5_WAgpLy2alrUVab6HA")); } - public void testGetStackTracesFromAPMWithMatch() throws Exception { - TermQueryBuilder query = QueryBuilders.termQuery("transaction.name", "encodeSha1"); + public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception { + BoolQueryBuilder query = QueryBuilders.boolQuery(); + query.must().add(QueryBuilders.termQuery("transaction.name", "encodeSha1")); + 
query.must().add(QueryBuilders.rangeQuery("@timestamp").lte("1698624000")); GetStackTracesRequest request = new GetStackTracesRequest( null, @@ -61,6 +64,7 @@ public void testGetStackTracesFromAPMWithMatch() throws Exception { ); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); assertEquals(49, response.getTotalFrames()); + assertEquals(1.0d, response.getSamplingRate(), 0.001d); assertNotNull(response.getStackTraceEvents()); assertEquals(3L, response.getStackTraceEvents().get("Ce77w10WeIDow3kd1jowlA").count); @@ -82,6 +86,70 @@ public void testGetStackTracesFromAPMWithMatch() throws Exception { assertEquals("libzip.so", response.getExecutables().get("GXH6S9Nv2Lf0omTz4cH4RA")); } + public void testGetStackTracesFromAPMWithMatchAndDownsampling() throws Exception { + TermQueryBuilder query = QueryBuilders.termQuery("transaction.name", "encodeSha1"); + Index apmTest = resolveIndex("apm-test-001"); + + GetStackTracesRequest request = new GetStackTracesRequest( + 1, + 1.0d, + 1.0d, + query, + "apm-test-*", + "transaction.profiler_stack_trace_ids", + null, + null, + null, + null, + null + ) { + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + // The random sampler aggregation takes a user-provided seed as well as the index UUID into account for randomness. This is + // fine for a production use case but here we need full control over the internal seed so test results are stable. As + // the index UUID changes between test runs, and we have no control over it, we will instead modify the user provided seed + // so that the random number generator is always initialized the same, regardless of the index UUID. 
+ // + // See org.elasticsearch.search.aggregations.bucket.sampler.random.RandomSamplingQuery#createWeight(), specifically the + // initialization of SplittableRandom(), which uses both the "seed" (user-provided) and a "hash", which is built from + // ShardId#hashCode(). By using the same hash code, the XOR will always evaluate to 0, thus producing a consistent seed for + // SplittableRandom(). + int baseSeed = new ShardId(apmTest, 0).hashCode(); + // a seed of zero won't return results for our test scenario, so we toggle one bit to generate a consistent non-zero seed. + return baseSeed ^ 2; + } + }; + + GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); + assertEquals(49, response.getTotalFrames()); + assertEquals(0.2d, response.getSamplingRate(), 0.001d); + + assertNotNull(response.getStackTraceEvents()); + // as the sampling rate is 0.2, we see 5 times more samples (random sampler agg automatically adjusts sample count) + assertEquals(5 * 3L, response.getStackTraceEvents().get("Ce77w10WeIDow3kd1jowlA").count); + assertEquals(5 * 2L, response.getStackTraceEvents().get("JvISdnJ47BQ01489cwF9DA").count); + + assertNotNull(response.getStackTraces()); + // just do a high-level spot check. 
Decoding is tested in unit-tests + StackTrace stackTrace = response.getStackTraces().get("Ce77w10WeIDow3kd1jowlA"); + assertEquals(39, stackTrace.addressOrLines.size()); + assertEquals(39, stackTrace.fileIds.size()); + assertEquals(39, stackTrace.frameIds.size()); + assertEquals(39, stackTrace.typeIds.size()); + + assertNotNull(response.getStackFrames()); + StackFrame stackFrame = response.getStackFrames().get("fhsEKXDuxJ-jIJrZpdRuSAAAAAAAAFtj"); + assertEquals(List.of("deflate", "deflate"), stackFrame.functionName); + + assertNotNull(response.getExecutables()); + assertEquals("libzip.so", response.getExecutables().get("GXH6S9Nv2Lf0omTz4cH4RA")); + } + public void testGetStackTracesFromAPMNoMatch() throws Exception { TermQueryBuilder query = QueryBuilders.termQuery("transaction.name", "nonExistingTransaction"); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java index 8dbab6e8c06a5..f3417dbf5d472 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java @@ -52,10 +52,11 @@ public void testNoTimeoutIfNotWaiting() throws Exception { assertFalse(response.hasData()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104035") public void testWaitsUntilResourcesAreCreated() throws Exception { updateProfilingTemplatesEnabled(true); GetStatusAction.Request request = new GetStatusAction.Request(); + // higher timeout since we have more shards than usual + request.timeout(TimeValue.timeValueSeconds(120)); request.waitForResourcesCreated(true); GetStatusAction.Response response = client().execute(GetStatusAction.INSTANCE, request).get(); diff --git 
a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java index 82d6f6193505d..6424c0f3ae259 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.license.LicenseSettings; import org.elasticsearch.plugins.Plugin; @@ -127,7 +128,8 @@ protected final void doSetupData() throws Exception { ); allIndices.add(apmTestIndex); waitForIndices(allIndices); - ensureGreen(allIndices.toArray(new String[0])); + // higher timeout since we have more shards than usual + ensureGreen(TimeValue.timeValueSeconds(120), allIndices.toArray(new String[0])); bulkIndex("data/profiling-events-all.ndjson"); bulkIndex("data/profiling-stacktraces.ndjson"); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/apm-test.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/apm-test.ndjson index d147256d6b90f..3cc9b6038fcbd 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/apm-test.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/apm-test.ndjson @@ -1,2 +1,10 @@ {"create": {"_index": "apm-test-001"}} {"@timestamp": "1698624000", "transaction.name": "encodeSha1", "transaction.profiler_stack_trace_ids": ["Ce77w10WeIDow3kd1jowlA", "JvISdnJ47BQ01489cwF9DA", "JvISdnJ47BQ01489cwF9DA", "Ce77w10WeIDow3kd1jowlA", "Ce77w10WeIDow3kd1jowlA"]} +{"create": {"_index": "apm-test-001"}} 
+{"@timestamp": "1698624001", "transaction.name": "encodeSha1", "transaction.profiler_stack_trace_ids": ["Ce77w10WeIDow3kd1jowlA", "JvISdnJ47BQ01489cwF9DA", "JvISdnJ47BQ01489cwF9DA", "Ce77w10WeIDow3kd1jowlA", "Ce77w10WeIDow3kd1jowlA"]} +{"create": {"_index": "apm-test-001"}} +{"@timestamp": "1698624002", "transaction.name": "encodeSha1", "transaction.profiler_stack_trace_ids": ["Ce77w10WeIDow3kd1jowlA", "JvISdnJ47BQ01489cwF9DA", "JvISdnJ47BQ01489cwF9DA", "Ce77w10WeIDow3kd1jowlA", "Ce77w10WeIDow3kd1jowlA"]} +{"create": {"_index": "apm-test-001"}} +{"@timestamp": "1698624003", "transaction.name": "encodeSha1", "transaction.profiler_stack_trace_ids": ["Ce77w10WeIDow3kd1jowlA", "JvISdnJ47BQ01489cwF9DA", "JvISdnJ47BQ01489cwF9DA", "Ce77w10WeIDow3kd1jowlA", "Ce77w10WeIDow3kd1jowlA"]} +{"create": {"_index": "apm-test-001"}} +{"@timestamp": "1698624004", "transaction.name": "encodeSha1", "transaction.profiler_stack_trace_ids": ["Ce77w10WeIDow3kd1jowlA", "JvISdnJ47BQ01489cwF9DA", "JvISdnJ47BQ01489cwF9DA", "Ce77w10WeIDow3kd1jowlA", "Ce77w10WeIDow3kd1jowlA"]} diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/indices/apm-test.json b/x-pack/plugin/profiling/src/internalClusterTest/resources/indices/apm-test.json index e0aeb707ffc76..4cdc739359c0b 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/indices/apm-test.json +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/indices/apm-test.json @@ -1,7 +1,8 @@ { "settings": { "index" : { - "number_of_replicas" : 0 + "number_of_replicas" : 0, + "number_of_shards": 1 } }, "mappings": { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index 1e44cba4e62b2..454cd35b396b9 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -9,8 +9,6 @@ import java.util.Map; -import static java.util.Map.entry; - final class CO2Calculator { private static final double DEFAULT_SAMPLING_FREQUENCY = 20.0d; private static final double DEFAULT_CO2_TONS_PER_KWH = 0.000379069d; // unit: metric tons / kWh @@ -51,12 +49,7 @@ public double getAnnualCO2Tons(String hostID, long samples) { return DEFAULT_KILOWATTS_PER_CORE * customCO2PerKWH * annualCoreHours * customDatacenterPUE; } - CostEntry costs = InstanceTypeService.getCosts(host.instanceType); - if (costs == null) { - return getKiloWattsPerCore(host) * getCO2TonsPerKWH(host) * annualCoreHours * getDatacenterPUE(host); - } - - return annualCoreHours * costs.co2Factor; // unit: metric tons + return getKiloWattsPerCore(host) * getCO2TonsPerKWH(host) * annualCoreHours * getDatacenterPUE(host); } private double getKiloWattsPerCore(HostMetadata host) { @@ -71,150 +64,10 @@ private double getKiloWattsPerCore(HostMetadata host) { } private double getCO2TonsPerKWH(HostMetadata host) { - Provider provider = PROVIDERS.get(host.instanceType.provider); - return provider == null ? customCO2PerKWH : provider.co2TonsPerKWH.getOrDefault(host.instanceType.region, customCO2PerKWH); + return CloudProviders.getCO2TonsPerKWHOrDefault(host.instanceType.provider, host.instanceType.region, customCO2PerKWH); } private double getDatacenterPUE(HostMetadata host) { - Provider provider = PROVIDERS.get(host.instanceType.provider); - return provider == null ? 
customDatacenterPUE : provider.pue; - } - - private record Provider(double pue, Map co2TonsPerKWH) {} - - // values are taken from https://www.cloudcarbonfootprint.org/docs/methodology/ - private static final Map PROVIDERS; - static { - // noinspection (explicit type arguments speedup compilation and analysis time) - PROVIDERS = Map.of( - "aws", - new Provider( - 1.135d, - Map.ofEntries( - entry("us-east-1", 0.000379069d), - entry("us-east-2", 0.000410608d), - entry("us-west-1", 0.000322167d), - entry("us-west-2", 0.000322167d), - entry("us-gov-east-1", 0.000379069d), - entry("us-gov-west-1", 0.000322167d), - entry("af-south-1", 0.0009006d), - entry("ap-east-1", 0.00071d), - entry("ap-south-1", 0.0007082d), - entry("ap-northeast-3", 0.0004658d), - entry("ap-northeast-2", 0.0004156d), - entry("ap-southeast-1", 0.000408d), - entry("ap-southeast-2", 0.00076d), - entry("ap-northeast-1", 0.0004658d), - entry("ca-central-1", 0.00012d), - entry("cn-north-1", 0.0005374d), - entry("cn-northwest-1", 0.0005374d), - entry("eu-central-1", 0.000311d), - entry("eu-west-1", 0.0002786d), - entry("eu-west-2", 0.000225d), - entry("eu-south-1", 0.0002134d), - entry("eu-west-3", 0.0000511d), - entry("eu-north-1", 0.0000088d), - entry("me-south-1", 0.0005059d), - entry("sa-east-1", 0.0000617d) - ) - ), - // noinspection (explicit type arguments speedup compilation and analysis time) - "gcp", - new Provider( - 1.1d, - Map.ofEntries( - entry("us-central1", 0.00003178d), - entry("us-east1", 0.0003504d), - entry("us-east4", 0.00015162d), - entry("us-west1", 0.0000078d), - entry("us-west2", 0.00011638d), - entry("us-west3", 0.00038376d), - entry("us-west4", 0.00036855d), - entry("asia-east1", 0.0004428d), - entry("asia-east2", 0.000453d), - entry("asia-northeast1", 0.00048752d), - entry("asia-northeast2", 0.00048752d), - entry("asia-northeast3", 0.00031533d), - entry("asia-south1", 0.00063448d), - entry("asia-south2", 0.000657d), - entry("asia-southeast1", 0.00047328d), - 
entry("asia-southeast2", 0.000647d), - entry("australia-southeast1", 0.00064703d), - entry("australia-southeast2", 0.000691d), - entry("europe-central2", 0.000622d), - entry("europe-north1", 0.00000798d), - entry("europe-west1", 0.00004452d), - entry("europe-west2", 0.00009471d), - entry("europe-west3", 0.000108), - entry("europe-west4", 0.000164d), - entry("europe-west6", 0.000087d), - entry("northamerica-northeast1", 0.000027d), - entry("southamerica-east1", 0.00001236d) - ) - ), - "azure", - new Provider( - 1.185d, - Map.ofEntries( - entry("centralus", 0.000426254d), - entry("eastus", 0.000379069d), - entry("eastus2", 0.000379069d), - entry("eastus3", 0.000379069d), - entry("northcentralus", 0.000410608d), - entry("southcentralus", 0.000373231d), - entry("westcentralusS", 0.000322167d), - entry("westus", 0.000322167d), - entry("westus2", 0.000322167d), - entry("westus3", 0.000322167d), - entry("eastasia", 0.00071d), - entry("southeastasia", 0.000408d), - entry("southafricanorth", 0.0009006d), - entry("southafricawest", 0.0009006d), - entry("southafrica", 0.0009006d), - entry("australia", 0.00079d), - entry("australiacentral", 0.00079d), - entry("australiacentral2", 0.00079d), - entry("australiaeast", 0.00079d), - entry("australiasoutheast", 0.00096d), - entry("japan", 0.0004658d), - entry("japanwest", 0.0004658d), - entry("japaneast", 0.0004658d), - entry("korea", 0.0004156d), - entry("koreaeast", 0.0004156d), - entry("koreasouth", 0.0004156d), - entry("india", 0.0007082d), - entry("indiawest", 0.0007082d), - entry("indiacentral", 0.0007082d), - entry("indiasouth", 0.0007082d), - entry("northeurope", 0.0002786d), - entry("westeurope", 0.0003284d), - entry("france", 0.00005128d), - entry("francecentral", 0.00005128d), - entry("francesouth", 0.00005128d), - entry("swedencentral", 0.00000567d), - entry("switzerland", 0.00000567d), - entry("switzerlandnorth", 0.00000567d), - entry("switzerlandwest", 0.00000567d), - entry("uk", 0.000225d), - entry("uksouth", 
0.000225d), - entry("ukwest", 0.000228d), - entry("germany", 0.00033866d), - entry("germanynorth", 0.00033866d), - entry("germanywestcentral", 0.00033866d), - entry("norway", 0.00000762d), - entry("norwayeast", 0.00000762d), - entry("norwaywest", 0.00000762d), - entry("unitedarabemirates", 0.0004041d), - entry("unitedarabemiratesnorth", 0.0004041d), - entry("unitedarabemiratescentral", 0.0004041d), - entry("canada", 0.00012d), - entry("canadacentral", 0.00012d), - entry("canadaeast", 0.00012d), - entry("brazil", 0.0000617d), - entry("brazilsouth", 0.0000617d), - entry("brazilsoutheast", 0.0000617d) - ) - ) - ); + return CloudProviders.getPUEOrDefault(host.instanceType.provider, customDatacenterPUE); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java new file mode 100644 index 0000000000000..0245df13f8fad --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java @@ -0,0 +1,247 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import java.util.Map; + +import static java.util.Map.entry; + +final class CloudProviders { + private CloudProviders() { + // no instances intended + } + + public record Provider(double pue, Map co2TonsPerKWH) {} + + private static Provider getProvider(String providerName) { + return PROVIDERS.get(providerName); + } + + /** + * Returns the PUE for the given provider, or the default value if the provider is unknown. + * PUE stands for Power Usage Effectiveness, and is a measure of how much power is used by + * the datacenter infrastructure (cooling, lighting, etc.) vs. 
the IT equipment (servers, etc.). + * A PUE of 1.0 means that all power is used by the IT equipment, and a PUE of 2.0 means that + * half of the power is used by the IT equipment and half is used by the infrastructure. + * See also https://en.wikipedia.org/wiki/Power_usage_effectiveness . + * + * @param providerName The name of the provider. + * Currently supported providers are "aws", "gcp", and "azure". + * If the provider is unknown, the default value is returned. + * @param defaultPUEValue The default value to return if the provider is unknown. + * @return The PUE for the given provider, or the default value if the provider is unknown. + */ + public static double getPUEOrDefault(String providerName, double defaultPUEValue) { + Provider provider = getProvider(providerName); + if (provider == null) { + return defaultPUEValue; + } + return provider.pue; + } + + /** + * Returns the CO2 emission factor for the given provider and region, or the default value if + * the provider or region is unknown. The CO2 emission factor is the amount of CO2 emitted per + * kWh of electricity consumed and measured in metric tons. + * + * @param providerName The name of the provider. + * Currently supported providers are "aws", "gcp", and "azure". + * If the provider is unknown, the default value is returned. + * @param region The name of the region. + * If the region is unknown, the default value is returned. + * @param defaultCO2Value The default value to return if the provider or region is unknown. + * @return The CO2 emission factor for the given provider and region, or the default value if + * the provider or region is unknown. 
+ */ + public static double getCO2TonsPerKWHOrDefault(String providerName, String region, double defaultCO2Value) { + Provider provider = getProvider(providerName); + if (provider == null) { + return defaultCO2Value; + } + return provider.co2TonsPerKWH.getOrDefault(region, defaultCO2Value); + } + + // The following data taken from https://www.cloudcarbonfootprint.org/docs/methodology/ + // and updated from https://github.com/PaoloFrigo/cloud-carbon-footprint . + // License: Apache 2.0 + // Copyright: Cloud Carbon Footprint, (C) 2021 Thoughtworks, Inc. + private static final Map PROVIDERS; + static { + // noinspection (explicit type arguments speedup compilation and analysis time) + PROVIDERS = Map.of( + "aws", + new Provider( + 1.135d, + Map.ofEntries( + entry("us-east-1", 0.000379069d), + entry("us-east-2", 0.000410608d), + entry("us-west-1", 0.000322167d), + entry("us-west-2", 0.000322167d), + entry("us-gov-east-1", 0.000379069d), + entry("us-gov-west-1", 0.000322167d), + entry("af-south-1", 0.0009006d), + entry("ap-east-1", 0.00071d), + entry("ap-south-1", 0.0007082d), + entry("ap-northeast-3", 0.0004658d), + entry("ap-northeast-2", 0.0004156d), + entry("ap-southeast-1", 0.000408d), + entry("ap-southeast-2", 0.00076d), + entry("ap-southeast-3", 0.0007177d), + entry("ap-northeast-1", 0.0004658d), + entry("ca-central-1", 0.00012d), + entry("cn-north-1", 0.0005374d), + entry("cn-northwest-1", 0.0005374d), + entry("eu-central-1", 0.000311d), + entry("eu-west-1", 0.0002786d), + entry("eu-west-2", 0.000225d), + entry("eu-south-1", 0.0002134d), + entry("eu-west-3", 0.0000511d), + entry("eu-north-1", 0.0000088d), + entry("me-south-1", 0.0005059d), + entry("me-central-1", 0.0004041), + entry("sa-east-1", 0.0000617d) + ) + ), + // noinspection (explicit type arguments speedup compilation and analysis time) + "gcp", + new Provider( + 1.1d, + // These emission factors take into account Google Carbon Free Energy percentage in each region. 
+ // Source: https://cloud.google.com/sustainability/region-carbon + Map.ofEntries( + entry("us-central1", 0.0002152373529d), + entry("us-central2", 0.0002152373529d), + entry("us-east1", 0.0003255d), + entry("us-east4", 0.00011124d), + entry("us-east5", 0.00011124d), + entry("us-west1", 0.0000072d), + entry("us-west2", 0.0000893d), + entry("us-west3", 0.00030912d), + entry("us-west4", 0.00028835d), + entry("us-south1", 0.0001776d), + entry("asia-east1", 0.00037848d), + entry("asia-east2", 0.0002592d), + entry("asia-northeast1", 0.00038976d), + entry("asia-northeast2", 0.00026496d), + entry("asia-northeast3", 0.00029325d), + entry("asia-south1", 0.000603d), + entry("asia-south2", 0.00061732d), + entry("asia-southeast1", 0.00035712d), + entry("asia-southeast2", 0.0005046d), + entry("australia-southeast1", 0.00047242d), + entry("australia-southeast2", 0.00035949d), + entry("europe-central2", 0.0004608d), + entry("europe-north1", 0.00001143d), + entry("europe-southwest1", 0.000121d), + entry("europe-west1", 0.0000198d), + entry("europe-west2", 0.00007396d), + entry("europe-west3", 0.0001076), + entry("europe-west4", 0.00013301d), + entry("europe-west6", 0.0000129d), + entry("europe-west8", 0.000298d), + entry("europe-west9", 0.000059d), + entry("northamerica-northeast1", 0d), // Montreal is 100% CFE + entry("northamerica-northeast2", 0.00000232d), + entry("southamerica-east1", 0.00002838d), + entry("southamerica-west1", 0.0000589d) + ) + ), + "azure", + new Provider( + 1.185d, + Map.ofEntries( + entry("centralus", 0.000426254d), + entry("centraluseuap", 0.000426254d), + entry("centralusestage", 0.000426254d), + entry("eastus", 0.000379069d), + entry("useast", 0.000379069d), + entry("eastusstage", 0.000379069d), + entry("eastus2", 0.000379069d), + entry("useast2", 0.000379069d), + entry("eastus2euap", 0.000379069d), + entry("eastus2stage", 0.000379069d), + entry("eastus3", 0.000379069d), + entry("usnorth", 0.000410608d), + entry("northcentralus", 0.000410608d), + 
entry("northcentralusstage", 0.000410608d), + entry("southcentralus", 0.000373231d), + entry("southcentralusstage", 0.000373231d), + entry("unitedstates", 0.000373231d), + entry("unitedstateseuap", 0.000373231d), + entry("westcentralus", 0.000322167d), + entry("westcentralusstage", 0.000322167d), + entry("westus", 0.000322167d), + entry("westusstage", 0.000322167d), + entry("westus2", 0.000322167d), + entry("westus2stage", 0.000322167d), + entry("westus3", 0.000322167d), + entry("asia", 0.0005647d), + entry("asiapacific", 0.0005647d), + entry("eastasia", 0.00071d), + entry("eastasiastage", 0.00071d), + entry("southeastasia", 0.000408d), + entry("asiasoutheast", 0.000408d), + entry("southafricanorth", 0.0009006d), + entry("southafricawest", 0.0009006d), + entry("southafrica", 0.0009006d), + entry("australia", 0.00079d), + entry("australiacentral", 0.00079d), + entry("australiacentral2", 0.00079d), + entry("australiaeast", 0.00079d), + entry("australiasoutheast", 0.00096d), + entry("apeast", 0.00071d), + entry("apsoutheast", 0.000408d), + entry("japan", 0.0004658d), + entry("japanwest", 0.0004658d), + entry("japaneast", 0.0004658d), + entry("korea", 0.0004156d), + entry("koreaeast", 0.0004156d), + entry("koreasouth", 0.0004156d), + entry("india", 0.0007082d), + entry("indiacentral", 0.0007082d), + entry("centralindia", 0.0007082d), + entry("jioindiacentral", 0.0007082d), + entry("indiawest", 0.0007082d), + entry("westindia", 0.0007082d), + entry("jioindiawest", 0.0007082d), + entry("indiasouth", 0.0007082d), + entry("southindia", 0.0007082d), + entry("northeurope", 0.0002786d), + entry("europenorth", 0.0002786d), + entry("westeurope", 0.0003284d), + entry("europewest", 0.0003284d), + entry("france", 0.00005128d), + entry("francecentral", 0.00005128d), + entry("francesouth", 0.00005128d), + entry("swedencentral", 0.00000567d), + entry("switzerland", 0.00001152d), + entry("switzerlandnorth", 0.00001152d), + entry("switzerlandwest", 0.00001152d), + entry("uk", 
0.000225d), + entry("uksouth", 0.000225d), + entry("ukwest", 0.000228d), + entry("germany", 0.00033866d), + entry("germanynorth", 0.00033866d), + entry("germanywestcentral", 0.00033866d), + entry("norway", 0.00000762d), + entry("norwayeast", 0.00000762d), + entry("norwaywest", 0.00000762d), + entry("uae", 0.0004041d), + entry("uaenorth", 0.0004041d), + entry("uaecentral", 0.0004041d), + entry("canada", 0.00012d), + entry("canadacentral", 0.00012d), + entry("canadaeast", 0.00012d), + entry("brazil", 0.0000617d), + entry("brazilsouth", 0.0000617d), + entry("brazilsoutheast", 0.0000617d) + ) + ) + ); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java index ecaaee5d3bf4b..99923d19d81ac 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java @@ -13,7 +13,7 @@ final class CostCalculator { private static final double DEFAULT_SAMPLING_FREQUENCY = 20.0d; private static final double SECONDS_PER_HOUR = 60 * 60; private static final double SECONDS_PER_YEAR = SECONDS_PER_HOUR * 24 * 365.0d; // unit: seconds - private static final double DEFAULT_COST_USD_PER_CORE_HOUR = 0.0425d; // unit: USD / (core * hour) + public static final double DEFAULT_COST_USD_PER_CORE_HOUR = 0.0425d; // unit: USD / (core * hour) private static final double DEFAULT_AWS_COST_FACTOR = 1.0d; private final Map hostMetadata; private final double samplingDurationInSeconds; @@ -47,7 +47,7 @@ public double annualCostsUSD(String hostID, double samples) { return annualCoreHours * customCostPerCoreHour * providerCostFactor; } - return annualCoreHours * costs.costFactor * providerCostFactor; + return annualCoreHours * (costs.usd_per_hour / host.profilingNumCores) * providerCostFactor; } public static double 
annualCoreHours(double duration, double samples, double samplingFrequency) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java index 6033e650072bc..b6795294e7f06 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java @@ -7,31 +7,25 @@ package org.elasticsearch.xpack.profiling; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; import java.util.Map; -final class CostEntry implements ToXContentObject { - final double co2Factor; - final double costFactor; +final class CostEntry { + final double usd_per_hour; - CostEntry(double co2Factor, double costFactor) { - this.co2Factor = co2Factor; - this.costFactor = costFactor; + CostEntry(double usdPerHour) { + this.usd_per_hour = usdPerHour; } public static CostEntry fromSource(Map source) { - return new CostEntry((Double) source.get("co2_factor"), (Double) source.get("cost_factor")); - } + var val = source.get("usd_per_hour"); + + if (val instanceof Number n) { + // Some JSON values have no decimal places and are passed in as Integers. 
+ return new CostEntry(n.doubleValue()); + } else if (val == null) { + return new CostEntry(CostCalculator.DEFAULT_COST_USD_PER_CORE_HOUR * HostMetadata.DEFAULT_PROFILING_NUM_CORES); + } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("co2_factor", this.co2Factor); - builder.field("cost_factor", this.costFactor); - builder.endObject(); - return builder; + throw new IllegalArgumentException("[" + val + "] is an invalid value for [usd_per_hour]"); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java index fc04f735fdf87..3719722ad2d62 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java @@ -7,13 +7,12 @@ package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public final class GetFlamegraphAction extends ActionType { public static final GetFlamegraphAction INSTANCE = new GetFlamegraphAction(); public static final String NAME = "indices:data/read/profiling/flamegraph"; private GetFlamegraphAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java index 84ab6643be781..1fd87740d6292 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java @@ -7,13 +7,12 @@ package 
org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public final class GetStackTracesAction extends ActionType { public static final GetStackTracesAction INSTANCE = new GetStackTracesAction(); public static final String NAME = "indices:data/read/profiling/stack_traces"; private GetStackTracesAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java index efa8fc1d64244..7672e2cc0c05b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java @@ -47,7 +47,7 @@ public class GetStackTracesRequest extends ActionRequest implements IndicesReque private static final int DEFAULT_SAMPLE_SIZE = 20_000; private QueryBuilder query; - private Integer sampleSize; + private int sampleSize; private String indices; private String stackTraceIds; private Double requestedDuration; @@ -80,7 +80,7 @@ public GetStackTracesRequest( Double customPerCoreWattARM64, Double customCostPerCoreHour ) { - this.sampleSize = sampleSize; + this.sampleSize = sampleSize != null ? sampleSize : DEFAULT_SAMPLE_SIZE; this.requestedDuration = requestedDuration; this.awsCostFactor = awsCostFactor; this.query = query; @@ -98,8 +98,8 @@ public void writeTo(StreamOutput out) { TransportAction.localOnly(); } - public Integer getSampleSize() { - return sampleSize != null ? 
sampleSize : DEFAULT_SAMPLE_SIZE; + public int getSampleSize() { + return sampleSize; } public Double getRequestedDuration() { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java index cb88021eebcf8..59132d45995e3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.profiling; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -26,7 +25,7 @@ public class GetStatusAction extends ActionType { public static final String NAME = "cluster:monitor/profiling/status/get"; protected GetStatusAction() { - super(NAME, GetStatusAction.Response::new); + super(NAME); } public static class Response extends ActionResponse implements ToXContentObject { @@ -144,11 +143,6 @@ public void waitForResourcesCreated(boolean waitForResourcesCreated) { this.waitForResourcesCreated = waitForResourcesCreated; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java index e0b634b5fb9dd..e9f912a3f60e5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java @@ -15,23 +15,29 @@ import java.util.Objects; final class HostMetadata 
implements ToXContentObject { + // "present_cpu_cores" is missing in the host metadata when collected before 8.12.0. + // 4 seems to be a reasonable default value. + static final int DEFAULT_PROFILING_NUM_CORES = 4; final String hostID; final InstanceType instanceType; final String profilingHostMachine; // aarch64 or x86_64 + final int profilingNumCores; // number of cores on the profiling host machine - HostMetadata(String hostID, InstanceType instanceType, String profilingHostMachine) { + HostMetadata(String hostID, InstanceType instanceType, String profilingHostMachine, Integer profilingNumCores) { this.hostID = hostID; this.instanceType = instanceType; this.profilingHostMachine = profilingHostMachine; + this.profilingNumCores = profilingNumCores != null ? profilingNumCores : DEFAULT_PROFILING_NUM_CORES; } public static HostMetadata fromSource(Map source) { if (source != null) { String hostID = (String) source.get("host.id"); String profilingHostMachine = (String) source.get("profiling.host.machine"); - return new HostMetadata(hostID, InstanceType.fromHostSource(source), profilingHostMachine); + Integer profilingNumCores = (Integer) source.get("profiling.agent.config.present_cpu_cores"); + return new HostMetadata(hostID, InstanceType.fromHostSource(source), profilingHostMachine, profilingNumCores); } - return new HostMetadata("", new InstanceType("", "", ""), ""); + return new HostMetadata("", new InstanceType("", "", ""), "", null); } @Override diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java index 701b2d8d8728d..7d1c5bdbf66a3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java @@ -55,6 +55,11 @@ static boolean isAnyAssignedToNode(ClusterState state, List 
indices, Pred * @return true iff at least one index is allocated to either a warm or cold data node. */ static boolean isAnyOnWarmOrColdTier(ClusterState state, List indices) { - return isAnyAssignedToNode(state, indices, n -> DataTier.isWarmNode(n) || DataTier.isColdNode(n)); + return isAnyAssignedToNode( + state, + indices, + // a content node is never considered a warm or cold node + n -> DataTier.isContentNode(n) == false && (DataTier.isWarmNode(n) || DataTier.isColdNode(n)) + ); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index b105cde3d5c2a..c07d2a480b006 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -117,6 +118,7 @@ public void updateTemplatesEnabled(boolean newValue) { @Override public List getRestHandlers( final Settings settings, + NamedWriteableRegistry namedWriteableRegistry, final RestController restController, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java index 1cbc724132d10..b417e267f12da 100644 --- 
a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java @@ -20,6 +20,9 @@ import java.util.function.Consumer; final class StackTrace implements ToXContentObject { + private static final String[] PATH_FRAME_IDS = new String[] { "Stacktrace", "frame", "ids" }; + private static final String[] PATH_FRAME_TYPES = new String[] { "Stacktrace", "frame", "types" }; + static final int NATIVE_FRAME_TYPE = 3; static final int KERNEL_FRAME_TYPE = 4; List addressOrLines; @@ -188,8 +191,8 @@ static String getFileIDFromStackFrameID(String frameID) { } public static StackTrace fromSource(Map source) { - String inputFrameIDs = ObjectPath.eval("Stacktrace.frame.ids", source); - String inputFrameTypes = ObjectPath.eval("Stacktrace.frame.types", source); + String inputFrameIDs = ObjectPath.eval(PATH_FRAME_IDS, source); + String inputFrameTypes = ObjectPath.eval(PATH_FRAME_TYPES, source); int countsFrameIDs = inputFrameIDs.length() / BASE64_FRAME_ID_LENGTH; List fileIDs = new ArrayList<>(countsFrameIDs); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 9b5eaeb4aa6fd..33d568fbd0cdb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -30,6 +30,8 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.sampler.random.RandomSamplerAggregationBuilder; import 
org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; @@ -59,7 +61,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; @@ -217,8 +218,42 @@ private void searchGenericEvents( ActionListener submitListener, GetStackTracesResponseBuilder responseBuilder ) { - responseBuilder.setSamplingRate(1.0d); - client.prepareSearch(request.indices()) + StopWatch watch = new StopWatch("getSamplingRate"); + client.prepareSearch(request.getIndices()) + .setSize(0) + .setTrackTotalHits(true) + .setRequestCache(true) + .setPreference(String.valueOf(request.hashCode())) + .setQuery(request.getQuery()) + .execute(ActionListener.wrap(searchResponse -> { + long sampleCount = searchResponse.getHits().getTotalHits().value; + int requestedSampleCount = request.getSampleSize(); + // random sampler aggregation does not support sampling rates between 0.5 and 1.0 -> clamp to 1.0 + if (sampleCount <= requestedSampleCount * 2L) { + responseBuilder.setSamplingRate(1.0d); + } else { + responseBuilder.setSamplingRate((double) requestedSampleCount / (double) sampleCount); + } + log.debug(watch::report); + log.debug( + "User requested [{}] samples, [{}] samples matched in [{}]. 
Sampling rate is [{}].", + requestedSampleCount, + sampleCount, + request.getIndices(), + responseBuilder.getSamplingRate() + ); + searchGenericEventGroupedByStackTrace(submitTask, client, request, submitListener, responseBuilder); + }, submitListener::onFailure)); + } + + private void searchGenericEventGroupedByStackTrace( + Task submitTask, + Client client, + GetStackTracesRequest request, + ActionListener submitListener, + GetStackTracesResponseBuilder responseBuilder + ) { + client.prepareSearch(request.getIndices()) .setTrackTotalHits(false) .setSize(0) // take advantage of request cache and keep a consistent order for the same request @@ -228,11 +263,16 @@ private void searchGenericEvents( .addAggregation(new MinAggregationBuilder("min_time").field("@timestamp")) .addAggregation(new MaxAggregationBuilder("max_time").field("@timestamp")) .addAggregation( - new CountedTermsAggregationBuilder("group_by").size(MAX_TRACE_EVENTS_RESULT_SIZE).field(request.getStackTraceIds()) + new RandomSamplerAggregationBuilder("sample").setSeed(request.hashCode()) + .setProbability(responseBuilder.getSamplingRate()) + .subAggregation( + new CountedTermsAggregationBuilder("group_by").size(MAX_TRACE_EVENTS_RESULT_SIZE).field(request.getStackTraceIds()) + ) ) .execute(handleEventsGroupedByStackTrace(submitTask, client, responseBuilder, submitListener, searchResponse -> { long totalSamples = 0; - StringTerms stacktraces = searchResponse.getAggregations().get("group_by"); + SingleBucketAggregation sample = searchResponse.getAggregations().get("sample"); + StringTerms stacktraces = sample.getAggregations().get("group_by"); // When we switch to aggregation by (hostID, stacktraceID) we need to change the empty List to this. 
// List hostEventCounts = new ArrayList<>(MAX_TRACE_EVENTS_RESULT_SIZE); @@ -256,6 +296,7 @@ private void searchGenericEvents( } responseBuilder.setTotalSamples(totalSamples); responseBuilder.setHostEventCounts(hostEventCounts); + log.debug("Found [{}] stacktrace events.", stackTraceEvents.size()); return stackTraceEvents; })); } @@ -483,11 +524,8 @@ private class StackTraceHandler { private final GetStackTracesResponseBuilder responseBuilder; private final ActionListener submitListener; private final Map stackTracePerId; - // sort items lexicographically to access Lucene's term dictionary more efficiently when issuing an mget request. - // The term dictionary is lexicographically sorted and using the same order reduces the number of page faults - // needed to load it. - private final Set stackFrameIds = new ConcurrentSkipListSet<>(); - private final Set executableIds = new ConcurrentSkipListSet<>(); + private final Set stackFrameIds; + private final Set executableIds; private final AtomicInteger totalFrames = new AtomicInteger(); private final StopWatch watch = new StopWatch("retrieveStackTraces"); private final StopWatch hostsWatch = new StopWatch("retrieveHostMetadata"); @@ -506,6 +544,9 @@ private StackTraceHandler( this.submitTask = submitTask; this.clusterState = clusterState; this.stackTracePerId = new ConcurrentHashMap<>(stackTraceCount); + // pre-size with a bit of headroom so the collection isn't resized too often + this.stackFrameIds = ConcurrentHashMap.newKeySet(stackTraceCount * 5); + this.executableIds = ConcurrentHashMap.newKeySet(stackTraceCount); this.expectedResponses = new AtomicInteger(expectedResponses); this.client = client; this.responseBuilder = responseBuilder; @@ -683,6 +724,7 @@ private void retrieveStackTraceDetails( * Collects stack trace details which are retrieved concurrently and sends a response only when all details are known. 
*/ private static class DetailsHandler { + private static final String[] PATH_FILE_NAME = new String[] { "Executable", "file", "name" }; private final GetStackTracesResponseBuilder builder; private final ActionListener submitListener; private final Map executables; @@ -740,7 +782,7 @@ public void onExecutableDetailsResponse(MultiGetResponse multiGetItemResponses) if (executable.getResponse().isExists()) { // Duplicates are expected as we query multiple indices - do a quick pre-check before we deserialize a response if (executables.containsKey(executable.getId()) == false) { - String fileName = ObjectPath.eval("Executable.file.name", executable.getResponse().getSource()); + String fileName = ObjectPath.eval(PATH_FILE_NAME, executable.getResponse().getSource()); if (fileName != null) { executables.putIfAbsent(executable.getId(), fileName); } else { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java index 004eae1395dc1..d918a0def7ebb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; @@ -177,7 +178,17 @@ private void execute(ClusterState state, ActionListener { + // no data yet + if (e instanceof SearchPhaseExecutionException) { + log.trace("Has data check has failed.", e); + listener.onResponse( + new GetStatusAction.Response(pluginEnabled, 
resourceManagementEnabled, resourcesCreated, anyPre891Data, false) + ); + } else { + listener.onFailure(e); + } + })); } else { listener.onResponse(new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, false, anyPre891Data, false)); } diff --git a/x-pack/plugin/profiling/src/main/resources/profiling-costs.json.gz b/x-pack/plugin/profiling/src/main/resources/profiling-costs.json.gz index e54b3175c7237..590c0ff606201 100644 Binary files a/x-pack/plugin/profiling/src/main/resources/profiling-costs.json.gz and b/x-pack/plugin/profiling/src/main/resources/profiling-costs.json.gz differ diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java index dadd541808300..48cc535dbe7e4 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java @@ -28,7 +28,8 @@ public void testCreateFromRegularSource() { "eu-west-1", "c5n.xlarge" ), - "" // Doesn't matter if datacenter is known. + "", // Doesn't matter if datacenter is known. + null ) ), Map.entry(HOST_ID_B, @@ -39,7 +40,8 @@ public void testCreateFromRegularSource() { "europe-west1", null // Doesn't matter for unknown datacenters. ), - "x86_64" + "x86_64", + null ) ), Map.entry(HOST_ID_C, @@ -50,7 +52,8 @@ public void testCreateFromRegularSource() { "northcentralus", null // Doesn't matter for unknown datacenters. ), - "aarch64" + "aarch64", + null ) ), Map.entry(HOST_ID_D, @@ -61,7 +64,8 @@ public void testCreateFromRegularSource() { "on-prem-region", null // Doesn't matter for unknown datacenters. 
), - "aarch64" + "aarch64", + null ) ) ); @@ -72,17 +76,12 @@ public void testCreateFromRegularSource() { double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 20.0d); CO2Calculator co2Calculator = new CO2Calculator(hostsTable, samplingDurationInSeconds, null, null, null, null); - checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_A, samples), annualCoreHours, 0.000002213477d); - checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_B, samples), annualCoreHours, 1.1d, 0.00004452d, 7.0d); + checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_A, samples), annualCoreHours, 1.135d, 0.0002786d, 7.0d); + checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_B, samples), annualCoreHours, 1.1d, 0.0000198d, 7.0d); checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_C, samples), annualCoreHours, 1.185d, 0.000410608d, 2.8d); checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_D, samples), annualCoreHours, 1.7d, 0.000379069d, 2.8d); } - private void checkCO2Calculation(double calculatedAnnualCO2Tons, double annualCoreHours, double co2Factor) { - double expectedAnnualCO2Tons = annualCoreHours * co2Factor; - assertEquals(expectedAnnualCO2Tons, calculatedAnnualCO2Tons, 0.000000000001d); - } - private void checkCO2Calculation( double calculatedAnnualCO2Tons, double annualCoreHours, diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java index 030616d285416..185451d0a9235 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java @@ -26,7 +26,8 @@ public void testCreateFromRegularSource() { "eu-west-1", "c5n.xlarge" ), - "" // Doesn't matter for cost calculation. 
+ "", // Doesn't matter for cost calculation. + null ) ), Map.entry(HOST_ID_B, @@ -37,7 +38,8 @@ public void testCreateFromRegularSource() { "on-prem-region", "on-prem-instance-type" ), - "" // Doesn't matter for cost calculation. + "", // Doesn't matter for cost calculation. + null ) ) ); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java index f0f328e48d00b..9594bd5233a5e 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java @@ -44,7 +44,7 @@ public void testParseValidXContent() throws IOException { GetStackTracesRequest request = new GetStackTracesRequest(); request.parseXContent(content); - assertEquals(Integer.valueOf(500), request.getSampleSize()); + assertEquals(500, request.getSampleSize()); assertEquals(Double.valueOf(100.54d), request.getRequestedDuration()); // a basic check suffices here assertEquals("@timestamp", ((RangeQueryBuilder) request.getQuery()).fieldName()); @@ -81,7 +81,7 @@ public void testParseValidXContentWithCustomIndex() throws IOException { GetStackTracesRequest request = new GetStackTracesRequest(); request.parseXContent(content); - assertEquals(Integer.valueOf(2000), request.getSampleSize()); + assertEquals(2000, request.getSampleSize()); assertEquals("my-traces", request.getIndices()); assertEquals("stacktraces", request.getStackTraceIds()); // a basic check suffices here @@ -124,7 +124,7 @@ public void testParseValidXContentWithCustomCostAndCO2Data() throws IOException GetStackTracesRequest request = new GetStackTracesRequest(); request.parseXContent(content); - assertEquals(Integer.valueOf(2000), request.getSampleSize()); + assertEquals(2000, request.getSampleSize()); 
assertEquals(Double.valueOf(100.54d), request.getRequestedDuration()); assertEquals(Double.valueOf(7.3d), request.getAwsCostFactor()); assertEquals(Double.valueOf(22.4d), request.getCustomCO2PerKWH()); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java index 852790e219a2d..bd66645243a92 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java @@ -36,6 +36,7 @@ import java.util.UUID; public class IndexAllocationTests extends ESTestCase { + private final Index content = idx("content"); private final Index hot = idx("hot"); private final Index warm = idx("warm"); private final Index cold = idx("cold"); @@ -49,6 +50,10 @@ public void testOtherIndicesNotOnWarmColdTier() { assertFalse(IndexAllocation.isAnyOnWarmOrColdTier(clusterState(), List.of(hot, frozen))); } + public void testIndicesOnContentNodeNotOnWarmColdTier() { + assertFalse(IndexAllocation.isAnyOnWarmOrColdTier(clusterState(), List.of(content))); + } + public void testIndicesOnWarmColdTier() { assertTrue(IndexAllocation.isAnyOnWarmOrColdTier(clusterState(), List.of(warm))); assertTrue(IndexAllocation.isAnyOnWarmOrColdTier(clusterState(), List.of(cold))); @@ -73,6 +78,20 @@ private ClusterState clusterState() { DiscoveryNode node = DiscoveryNodeUtils.create("node"); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node); + nodesBuilder.add( + DiscoveryNodeUtils.builder("n-" + content.getName()) + .roles( + Set.of( + // content nodes have all roles + DiscoveryNodeRole.DATA_CONTENT_NODE_ROLE, + DiscoveryNodeRole.DATA_HOT_NODE_ROLE, + DiscoveryNodeRole.DATA_WARM_NODE_ROLE, + DiscoveryNodeRole.DATA_COLD_NODE_ROLE, + 
DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE + ) + ) + .build() + ); nodesBuilder.add(DiscoveryNodeUtils.builder("n-" + hot.getName()).roles(Set.of(DiscoveryNodeRole.DATA_HOT_NODE_ROLE)).build()); nodesBuilder.add(DiscoveryNodeUtils.builder("n-" + warm.getName()).roles(Set.of(DiscoveryNodeRole.DATA_WARM_NODE_ROLE)).build()); nodesBuilder.add(DiscoveryNodeUtils.builder("n-" + cold.getName()).roles(Set.of(DiscoveryNodeRole.DATA_COLD_NODE_ROLE)).build()); @@ -82,7 +101,7 @@ private ClusterState clusterState() { RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); Map indices = new HashMap<>(); - for (Index index : List.of(hot, warm, cold, frozen)) { + for (Index index : List.of(content, hot, warm, cold, frozen)) { indices.put(index.getName(), metadata(index)); ShardRouting shardRouting = ShardRouting.newUnassigned( new ShardId(index, 0), diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Alias.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Alias.java index df9f3c1d20eec..b3c6568ff9cac 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Alias.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Alias.java @@ -106,4 +106,11 @@ public String toString() { public String nodeString() { return child.nodeString() + " AS " + name(); } + + /** + * If the given expression is an alias, return its child - otherwise return as is. + */ + public static Expression unwrap(Expression e) { + return e instanceof Alias as ? 
as.child() : e; + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expressions.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expressions.java index 41955797a7e1c..de39cfcc0c910 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expressions.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expressions.java @@ -28,7 +28,7 @@ public final class Expressions { private Expressions() {} public static NamedExpression wrapAsNamed(Expression exp) { - return exp instanceof NamedExpression ? (NamedExpression) exp : new Alias(exp.source(), exp.sourceText(), exp); + return exp instanceof NamedExpression ne ? ne : new Alias(exp.source(), exp.sourceText(), exp); } public static List asAttributes(List named) { @@ -136,7 +136,7 @@ public static AttributeSet references(List exps) { } public static String name(Expression e) { - return e instanceof NamedExpression ? ((NamedExpression) e).name() : e.sourceText(); + return e instanceof NamedExpression ne ? 
ne.name() : e.sourceText(); } public static boolean isNull(Expression e) { @@ -153,8 +153,8 @@ public static List names(Collection e) { } public static Attribute attribute(Expression e) { - if (e instanceof NamedExpression) { - return ((NamedExpression) e).toAttribute(); + if (e instanceof NamedExpression ne) { + return ne.toAttribute(); } return null; } @@ -175,8 +175,8 @@ public static List> aliases(List> aliases = new ArrayList<>(); for (NamedExpression ne : named) { - if (ne instanceof Alias) { - aliases.add(new Tuple<>(ne.toAttribute(), ((Alias) ne).child())); + if (ne instanceof Alias as) { + aliases.add(new Tuple<>(ne.toAttribute(), as.child())); } } return aliases; @@ -218,11 +218,11 @@ public static Pipe pipe(Expression e) { if (e.foldable()) { return new ConstantInput(e.source(), e, e.fold()); } - if (e instanceof NamedExpression) { - return new AttributeInput(e.source(), e, ((NamedExpression) e).toAttribute()); + if (e instanceof NamedExpression ne) { + return new AttributeInput(e.source(), e, ne.toAttribute()); } - if (e instanceof Function) { - return ((Function) e).asPipe(); + if (e instanceof Function f) { + return f.asPipe(); } throw new QlIllegalArgumentException("Cannot create pipe for {}", e); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/function/aggregate/SpatialAggregateFunction.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/function/aggregate/SpatialAggregateFunction.java new file mode 100644 index 0000000000000..e73d0e71eb246 --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/function/aggregate/SpatialAggregateFunction.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ql.expression.function.aggregate; + +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.Objects; + +/** + * All spatial aggregate functions extend this class to enable the planning of reading from doc values for higher performance. + * The AggregateMapper class will generate multiple aggregation functions for each combination, allowing the planner to + * select the best one. + */ +public abstract class SpatialAggregateFunction extends AggregateFunction { + protected final boolean useDocValues; + + protected SpatialAggregateFunction(Source source, Expression field, boolean useDocValues) { + super(source, field); + this.useDocValues = useDocValues; + } + + public abstract SpatialAggregateFunction withDocValues(); + + @Override + public int hashCode() { + // NB: the hashcode is currently used for key generation so + // to avoid clashes between aggs with the same arguments, add the class name as variation + return Objects.hash(getClass(), children(), useDocValues); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + SpatialAggregateFunction other = (SpatialAggregateFunction) obj; + return Objects.equals(other.field(), field()) + && Objects.equals(other.parameters(), parameters()) + && Objects.equals(other.useDocValues, useDocValues); + } + return false; + } + + public boolean useDocValues() { + return useDocValues; + } +} diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java index 0d659c5dbfb2d..e8c9e3aead362 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java @@ -198,10 +198,10 @@ public static class BinaryLogic extends 
ExpressionTranslator originalList = (List) originalArgValue; - - if (node.children().equals(originalList)) { + if (col instanceof List originalList && node.children().equals(originalList)) { // The arg we're looking at *is* the children @SuppressWarnings("unchecked") // we pass a reasonable type so get reasonable results List newChildren = (List) makeListOfSameSizeOtherThan(changedArgType, originalList); B transformed = node.replaceChildren(newChildren); assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, newChildren); - } else if (false == originalList.isEmpty() && node.children().containsAll(originalList)) { + } else if (false == col.isEmpty() && node.children().containsAll(col)) { // The arg we're looking at is a collection contained within the children + List originalList = (List) originalArgValue; // First make the new children @SuppressWarnings("unchecked") // we pass a reasonable type so get reasonable results @@ -368,6 +369,9 @@ private Object makeArg(Class> toBuildClass, Type argType) thro if (pt.getRawType() == List.class) { return makeList(toBuildClass, pt); } + if (pt.getRawType() == Set.class) { + return makeSet(toBuildClass, pt); + } if (pt.getRawType() == EnumSet.class) { @SuppressWarnings("rawtypes") Enum enm = (Enum) makeArg(toBuildClass, pt.getActualTypeArguments()[0]); @@ -474,6 +478,10 @@ public boolean equals(Object obj) { */ return UnresolvedAttributeTests.randomUnresolvedAttribute(); } + if (EnrichPolicy.class == argClass) { + List enrichFields = randomSubsetOf(List.of("e1", "e2", "e3")); + return new EnrichPolicy(randomFrom("match", "range"), null, List.of(), randomFrom("m1", "m2"), enrichFields); + } if (Pipe.class == argClass) { /* @@ -560,6 +568,18 @@ private List makeList(Class> toBuildClass, ParameterizedTyp return list; } + private Set makeSet(Class> toBuildClass, ParameterizedType listType) throws Exception { + return makeSet(toBuildClass, listType, randomSizeForCollection(toBuildClass)); + } + 
+ private Set makeSet(Class> toBuildClass, ParameterizedType listType, int size) throws Exception { + Set list = new HashSet<>(); + for (int i = 0; i < size; i++) { + list.add(makeArg(toBuildClass, listType.getActualTypeArguments()[0])); + } + return list; + } + private Object makeMap(Class> toBuildClass, ParameterizedType pt) throws Exception { Map map = new HashMap<>(); int size = randomSizeForCollection(toBuildClass); diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java index ca650bf29662f..fa53027e43901 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java @@ -55,7 +55,7 @@ public void testParsing() { for (int i = 0; i < 10; i++) { SpatialCoordinateTypes coordType = type.getKey(); Point point = type.getValue().randomPoint.get(); - assertEquals(coordType.wkbAsString(coordType.pointAsWKB(point)), coordType.pointAsString(point)); + assertEquals(coordType.wkbToWkt(coordType.asWkb(point)), coordType.asWkt(point)); } } } diff --git a/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java b/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java index f9ba295d4dd4d..4441ec70f74aa 100644 --- a/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java +++ b/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/RepositoriesMeteringPlugin.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import 
org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -42,6 +43,7 @@ public final class RepositoriesMeteringPlugin extends Plugin implements ActionPl @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/action/ClearRepositoriesMeteringArchiveAction.java b/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/action/ClearRepositoriesMeteringArchiveAction.java index 30b5b50be89c6..714b8a7d9965a 100644 --- a/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/action/ClearRepositoriesMeteringArchiveAction.java +++ b/x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/action/ClearRepositoriesMeteringArchiveAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.repositories.metering.action; import org.elasticsearch.action.ActionType; -import org.elasticsearch.common.io.stream.Writeable; public final class ClearRepositoriesMeteringArchiveAction extends ActionType { public static final ClearRepositoriesMeteringArchiveAction INSTANCE = new ClearRepositoriesMeteringArchiveAction(); @@ -16,6 +15,6 @@ public final class ClearRepositoriesMeteringArchiveAction extends ActionType { public static final RepositoriesMeteringAction INSTANCE = new RepositoriesMeteringAction(); @@ -16,6 +15,6 @@ public final class RepositoriesMeteringAction extends ActionType getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, 
RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -94,7 +96,7 @@ public List getRestHandlers( Supplier nodesInCluster ) { return Arrays.asList( - new RestRollupSearchAction(), + new RestRollupSearchAction(namedWriteableRegistry), new RestPutRollupJobAction(), new RestStartRollupJobAction(), new RestStopRollupJobAction(), diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index f7394ec12a779..987e3e99bb91a 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -257,9 +257,7 @@ private static SearchResponse doCombineResponse( AggregationReduceContext.Builder reduceContextBuilder ) { - final InternalAggregations liveAggs = liveResponse != null - ? (InternalAggregations) liveResponse.getAggregations() - : InternalAggregations.EMPTY; + final InternalAggregations liveAggs = liveResponse != null ? 
liveResponse.getAggregations() : InternalAggregations.EMPTY; int missingRollupAggs = rolledResponses.stream().mapToInt(searchResponse -> { if (searchResponse == null @@ -386,7 +384,7 @@ private static List unrollAgg( count = getAggCount(agg, rolled.getAsMap()); } - return unrollAgg((InternalAggregation) agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); + return unrollAgg(agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); }).collect(Collectors.toList()); } @@ -580,7 +578,7 @@ private static InternalAggregations unrollSubAggsFromMulti(InternalBucket bucket currentSubAgg = currentTree.getAggregations().get(subAgg.getName()); } - return unrollAgg((InternalAggregation) subAgg, originalSubAgg, currentSubAgg, count); + return unrollAgg(subAgg, originalSubAgg, currentSubAgg, count); }) .collect(Collectors.toList()) ); @@ -619,7 +617,7 @@ private static InternalAggregation unrollMetric(SingleValue metric, long count) } } - private static long getAggCount(Aggregation agg, Map aggMap) { + private static long getAggCount(Aggregation agg, Map aggMap) { String countPath = null; if (agg.getType().equals(DateHistogramAggregationBuilder.NAME) diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 2df415fbe02dc..6bd29ddb52301 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -454,6 +454,9 @@ public void onResponse(SearchResponse response) { channel.sendResponse(response); } catch (Exception e) { onFailure(e); + } finally { + // TODO - avoid the implicit incref elsewhere and then replace this whole thing with a ChannelActionListener + response.decRef(); } } diff --git 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index 095eb141bb39d..2b995b0e56da0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -62,7 +62,7 @@ static Stream processBuckets( // Put the composite keys into a treemap so that the key iteration order is consistent // TODO would be nice to avoid allocating this treemap in the future TreeMap keys = new TreeMap<>(b.getKey()); - List metrics = b.getAggregations().asList(); + List metrics = b.getAggregations().asList(); RollupIDGenerator idGenerator = new RollupIDGenerator(jobId); Map doc = Maps.newMapWithExpectedSize(keys.size() + metrics.size()); @@ -124,7 +124,7 @@ private static void processKeys( }); } - private static void processMetrics(List metrics, Map doc) { + private static void processMetrics(List metrics, Map doc) { List emptyCounts = new ArrayList<>(); metrics.forEach(m -> { if (m instanceof InternalNumericMetricsAggregation.SingleValue) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index 68b5b8953ccb7..bf979f9deabf0 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -11,12 +11,12 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.scheduler.SchedulerEngine; @@ -164,10 +164,10 @@ protected void doSaveState(IndexerState indexerState, Map positi @Override protected void onFinish(ActionListener listener) { final RollupJobConfig jobConfig = job.getConfig(); - final ActionListener refreshResponseActionListener = new ActionListener<>() { + final ActionListener refreshResponseActionListener = new ActionListener<>() { @Override - public void onResponse(RefreshResponse refreshResponse) { + public void onResponse(BroadcastResponse refreshResponse) { logger.trace("refreshing rollup index {} successful for job {}", jobConfig.getRollupIndex(), jobConfig.getId()); listener.onResponse(null); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java index 266f515d1dbb6..68c8fba19af4e 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java +++ 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; @@ -25,6 +26,12 @@ public class RestRollupSearchAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); + private final NamedWriteableRegistry namedWriteableRegistry; + + public RestRollupSearchAction(NamedWriteableRegistry namedWriteableRegistry) { + this.namedWriteableRegistry = namedWriteableRegistry; + } + @Override public List routes() { return List.of( @@ -43,7 +50,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient searchRequest, restRequest, parser, - client.getNamedWriteableRegistry(), + namedWriteableRegistry, size -> searchRequest.source().size(size) ) ); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index e9f882731521f..ae0949f5bedfa 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -46,7 +46,6 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; import 
org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -187,7 +186,7 @@ public void testMissingLiveIndex() throws Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggsWithout = InternalAggregations.from(aggTree); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTree); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse msearch = new MultiSearchResponse( @@ -230,7 +229,7 @@ public void testRolledMissingAggs() throws Exception { ); try { assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); + InternalAggregations responseAggs = response.getAggregations(); assertThat(responseAggs.asList().size(), equalTo(0)); } finally { // this SearchResponse is not a mock, so must be decRef'd @@ -311,7 +310,7 @@ public void testTranslateRollup() throws Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggs = InternalAggregations.from(aggTree); + InternalAggregations mockAggs = InternalAggregations.from(aggTree); when(response.getAggregations()).thenReturn(mockAggs); MultiSearchResponse multiSearchResponse = new MultiSearchResponse( new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(response, null) }, @@ -325,7 +324,7 @@ public void testTranslateRollup() throws Exception { ); try { assertNotNull(finalResponse); - Aggregations responseAggs = finalResponse.getAggregations(); + InternalAggregations responseAggs = finalResponse.getAggregations(); assertNotNull(finalResponse); Avg avg = responseAggs.get("foo"); assertThat(avg.getValue(), equalTo(5.0)); @@ -365,7 +364,7 @@ public void testMissingFilter() { Max protoMax = mock(Max.class); when(protoMax.getName()).thenReturn("foo"); protoAggTree.add(protoMax); - Aggregations protoMockAggs = InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = 
InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -374,7 +373,7 @@ public void testMissingFilter() { Max max = mock(Max.class); when(max.getName()).thenReturn("bizzbuzz"); aggTreeWithoutFilter.add(max); - Aggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); @@ -396,7 +395,7 @@ public void testMatchingNameNotFilter() { Max protoMax = mock(Max.class); when(protoMax.getName()).thenReturn("foo"); protoAggTree.add(protoMax); - Aggregations protoMockAggs = InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -404,7 +403,7 @@ public void testMatchingNameNotFilter() { List aggTreeWithoutFilter = new ArrayList<>(1); Max max = new Max("filter_foo", 0, DocValueFormat.RAW, null); aggTreeWithoutFilter.add(max); - Aggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); @@ -426,7 +425,7 @@ public void testSimpleReduction() throws Exception { List protoAggTree = new ArrayList<>(1); InternalAvg internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, null); protoAggTree.add(internalAvg); - Aggregations protoMockAggs = 
InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -458,7 +457,7 @@ public void testSimpleReduction() throws Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggsWithout = InternalAggregations.from(aggTree); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTree); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); @@ -474,7 +473,7 @@ public void testSimpleReduction() throws Exception { ); try { assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); + InternalAggregations responseAggs = response.getAggregations(); assertNotNull(responseAggs); Avg avg = responseAggs.get("foo"); assertThat(avg.getValue(), equalTo(5.0)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index 32b9c2df962a9..7971695ecabc1 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -737,7 +736,7 @@ public void testRollupOnly() throws 
Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggs = InternalAggregations.from(aggTree); + InternalAggregations mockAggs = InternalAggregations.from(aggTree); when(response.getAggregations()).thenReturn(mockAggs); MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null); MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[] { item }, 1); @@ -749,7 +748,7 @@ public void testRollupOnly() throws Exception { ); try { assertNotNull(r); - Aggregations responseAggs = r.getAggregations(); + InternalAggregations responseAggs = r.getAggregations(); Avg avg = responseAggs.get("foo"); assertThat(avg.getValue(), IsEqual.equalTo(5.0)); } finally { @@ -844,7 +843,7 @@ public void testBoth() throws Exception { List protoAggTree = new ArrayList<>(1); InternalAvg internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, null); protoAggTree.add(internalAvg); - Aggregations protoMockAggs = InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -874,7 +873,7 @@ public void testBoth() throws Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggsWithout = InternalAggregations.from(aggTree); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTree); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); @@ -893,7 +892,7 @@ public void testBoth() throws Exception { ); try { assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); + InternalAggregations responseAggs = response.getAggregations(); assertNotNull(responseAggs); Avg avg = 
responseAggs.get("foo"); assertThat(avg.getValue(), IsEqual.equalTo(5.0)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index d680752efc498..a1eac8c781390 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -19,10 +19,10 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder; @@ -306,7 +306,7 @@ public void testKeyOrdering() { keys = shuffleMap(keys, Collections.emptySet()); when(bucket.getKey()).thenReturn(keys); - List list = new ArrayList<>(3); + List list = new ArrayList<>(3); InternalNumericMetricsAggregation.SingleValue mockAgg = mock(InternalNumericMetricsAggregation.SingleValue.class); when(mockAgg.getName()).thenReturn("123"); list.add(mockAgg); @@ -321,7 +321,7 @@ public void testKeyOrdering() { Collections.shuffle(list, random()); - Aggregations aggs = new Aggregations(list); + InternalAggregations aggs = InternalAggregations.from(list); when(bucket.getAggregations()).thenReturn(aggs); 
when(bucket.getDocCount()).thenReturn(1L); @@ -357,7 +357,7 @@ public void testKeyOrderingLong() { keys = shuffleMap(keys, Collections.emptySet()); when(bucket.getKey()).thenReturn(keys); - List list = new ArrayList<>(3); + List list = new ArrayList<>(3); InternalNumericMetricsAggregation.SingleValue mockAgg = mock(InternalNumericMetricsAggregation.SingleValue.class); when(mockAgg.getName()).thenReturn("123"); list.add(mockAgg); @@ -372,7 +372,7 @@ public void testKeyOrderingLong() { Collections.shuffle(list, random()); - Aggregations aggs = new Aggregations(list); + InternalAggregations aggs = InternalAggregations.from(list); when(bucket.getAggregations()).thenReturn(aggs); when(bucket.getDocCount()).thenReturn(1L); @@ -400,7 +400,7 @@ public void testNullKeys() { keys.put("abc.histogram", null); when(bucket.getKey()).thenReturn(keys); - Aggregations aggs = new Aggregations(Collections.emptyList()); + InternalAggregations aggs = InternalAggregations.from(Collections.emptyList()); when(bucket.getAggregations()).thenReturn(aggs); when(bucket.getDocCount()).thenReturn(1L); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index e2cb5a5bc61b0..34f1f2f97a328 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -44,10 +44,10 @@ import org.elasticsearch.index.query.SearchExecutionContextHelper; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import 
org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -860,7 +860,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener .iterator() .next(); - CompositeAggregation result = null; + InternalComposite result = null; try { result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldTypes).withQuery(query)); } catch (IOException e) { @@ -870,7 +870,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener listener, new SearchResponse( SearchHits.EMPTY_WITH_TOTAL_HITS, - new Aggregations(Collections.singletonList(result)), + InternalAggregations.from(Collections.singletonList(result)), null, false, null, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index bb910da326e0a..24c034358be74 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -15,13 +15,11 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; @@ -31,7 +29,6 @@ import org.hamcrest.Matchers; import org.mockito.stubbing.Answer; -import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -46,7 +43,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; public class RollupIndexerStateTests extends ESTestCase { private static class EmptyRollupIndexer extends RollupIndexer { @@ -71,38 +70,12 @@ private static class EmptyRollupIndexer extends RollupIndexer { @Override protected void doNextSearch(long waitTimeInNanos, ActionListener nextPhase) { - // TODO Should use InternalComposite constructor but it is package protected in core. 
- Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() { - @Override - public List getBuckets() { - return Collections.emptyList(); - } - - @Override - public Map afterKey() { - return null; - } - - @Override - public String getName() { - return AGGREGATION_NAME; - } - - @Override - public String getType() { - return null; - } + InternalComposite composite = mock(InternalComposite.class); + when(composite.getBuckets()).thenReturn(List.of()); + when(composite.getName()).thenReturn(AGGREGATION_NAME); - @Override - public Map getMetadata() { - return null; - } + InternalAggregations aggs = InternalAggregations.from(List.of(composite)); - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - })); ActionListener.respondAndRelease( nextPhase, new SearchResponse( @@ -442,41 +415,18 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener } catch (InterruptedException e) { throw new IllegalStateException(e); } - // TODO Should use InternalComposite constructor but it is package protected in core. 
- Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() { - @Override - public List getBuckets() { - // Abort immediately before we are attempting to finish the job because the response - // was empty - state.set(IndexerState.ABORTING); - return Collections.emptyList(); - } - - @Override - public Map afterKey() { - return null; - } - @Override - public String getName() { - return AGGREGATION_NAME; - } - - @Override - public String getType() { - return null; - } + InternalComposite composite = mock(InternalComposite.class); + when(composite.getBuckets()).thenAnswer(invocation -> { + // Abort immediately before we are attempting to finish the job because the response + // was empty + state.set(IndexerState.ABORTING); + return List.of(); + }); + when(composite.getName()).thenReturn(AGGREGATION_NAME); - @Override - public Map getMetadata() { - return null; - } + InternalAggregations aggs = InternalAggregations.from(List.of(composite)); - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - })); ActionListener.respondAndRelease( nextPhase, new SearchResponse( @@ -638,64 +588,18 @@ public void testUnknownKey() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); Function searchFunction = searchRequest -> { - Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() { - @Override - public List getBuckets() { - Bucket b = new Bucket() { - @Override - public Map getKey() { - return Collections.singletonMap("foo", "bar"); - } - - @Override - public String getKeyAsString() { - return null; - } - - @Override - public long getDocCount() { - return 1; - } - @Override - public Aggregations getAggregations() { - return InternalAggregations.EMPTY; - } - - @Override - public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - }; + InternalComposite.InternalBucket bucket = mock(InternalComposite.InternalBucket.class); + when(bucket.getKey()).thenReturn(Map.of("foo", "bar")); + when(bucket.getDocCount()).thenReturn(1L); + when(bucket.getAggregations()).thenReturn(InternalAggregations.EMPTY); - return Collections.singletonList(b); - } + InternalComposite composite = mock(InternalComposite.class); + when(composite.getBuckets()).thenReturn(List.of(bucket)); + when(composite.getName()).thenReturn(RollupField.NAME); - @Override - public Map afterKey() { - return null; - } + InternalAggregations aggs = InternalAggregations.from(List.of(composite)); - @Override - public String getName() { - return RollupField.NAME; - } - - @Override - public String getType() { - return null; - } - - @Override - public Map getMetadata() { - return null; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - })); return new SearchResponse( SearchHits.EMPTY_WITH_TOTAL_HITS, aggs, @@ -767,65 +671,20 @@ public void testFailureWhileStopping() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); Function searchFunction = searchRequest -> { - Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() { - @Override - public List getBuckets() { - Bucket b = new Bucket() { - @Override - public Map getKey() { - state.set(IndexerState.STOPPING); // <- Force a stop so we can see how error + non-INDEXING state is handled - return Collections.singletonMap("foo", "bar"); // This will throw an exception - } - - @Override - public String getKeyAsString() { - return null; - } - - @Override - public long getDocCount() { - return 1; - } - - @Override - public Aggregations getAggregations() { - 
return InternalAggregations.EMPTY; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - }; - - return Collections.singletonList(b); - } - @Override - public Map afterKey() { - return null; - } - - @Override - public String getName() { - return RollupField.NAME; - } - - @Override - public String getType() { - return null; - } + InternalComposite.InternalBucket bucket = mock(InternalComposite.InternalBucket.class); + when(bucket.getKey()).thenAnswer(invocation -> { + state.set(IndexerState.STOPPING); // <- Force a stop so we can see how error + non-INDEXING state is handled + return Collections.singletonMap("foo", "bar"); // This will throw an exception + }); + when(bucket.getDocCount()).thenReturn(1L); + when(bucket.getAggregations()).thenReturn(InternalAggregations.EMPTY); - @Override - public Map getMetadata() { - return null; - } + InternalComposite composite = mock(InternalComposite.class); + when(composite.getBuckets()).thenReturn(List.of(bucket)); + when(composite.getName()).thenReturn(RollupField.NAME); - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - })); + InternalAggregations aggs = InternalAggregations.from(List.of(composite)); return new SearchResponse( SearchHits.EMPTY_WITH_TOTAL_HITS, aggs, @@ -947,64 +806,18 @@ public void testBulkFailure() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); Function searchFunction = searchRequest -> { - Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() { - @Override - public List getBuckets() { - Bucket b = new Bucket() { - @Override - public Map getKey() { - return Collections.singletonMap("foo.terms", "bar"); - } - - @Override - public String getKeyAsString() { - return null; 
- } - - @Override - public long getDocCount() { - return 1; - } - @Override - public Aggregations getAggregations() { - return InternalAggregations.EMPTY; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - }; + InternalComposite.InternalBucket bucket = mock(InternalComposite.InternalBucket.class); + when(bucket.getKey()).thenReturn(Map.of("foo.terms", "bar")); + when(bucket.getDocCount()).thenReturn(1L); + when(bucket.getAggregations()).thenReturn(InternalAggregations.EMPTY); - return Collections.singletonList(b); - } + InternalComposite composite = mock(InternalComposite.class); + when(composite.getName()).thenReturn(RollupField.NAME); + when(composite.getBuckets()).thenReturn(List.of(bucket)); - @Override - public Map afterKey() { - return null; - } + InternalAggregations aggs = InternalAggregations.from(List.of(composite)); - @Override - public String getName() { - return RollupField.NAME; - } - - @Override - public String getType() { - return null; - } - - @Override - public Map getMetadata() { - return null; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - })); return new SearchResponse( SearchHits.EMPTY_WITH_TOTAL_HITS, aggs, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index 7fcde59f73088..8b63b76cdf248 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -11,10 +11,10 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import 
org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.scheduler.SchedulerEngine; import org.elasticsearch.common.settings.Settings; @@ -23,8 +23,8 @@ import org.elasticsearch.node.Node; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; @@ -590,7 +590,7 @@ public void testTriggerWithoutHeaders() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); Client client = mock(Client.class); doAnswer(invocationOnMock -> { - RefreshResponse r = new RefreshResponse(2, 2, 0, Collections.emptyList()); + BroadcastResponse r = new BroadcastResponse(2, 2, 0, Collections.emptyList()); ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -608,10 +608,10 @@ public void testTriggerWithoutHeaders() throws Exception { assertTrue(threadContext.getHeaders().isEmpty()); SearchResponse r = mock(SearchResponse.class); when(r.getShardFailures()).thenReturn(ShardSearchFailure.EMPTY_ARRAY); - CompositeAggregation compositeAgg = mock(CompositeAggregation.class); + InternalComposite compositeAgg = 
mock(InternalComposite.class); when(compositeAgg.getBuckets()).thenReturn(Collections.emptyList()); when(compositeAgg.getName()).thenReturn(RollupField.NAME); - Aggregations aggs = new Aggregations(Collections.singletonList(compositeAgg)); + InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(compositeAgg)); when(r.getAggregations()).thenReturn(aggs); // Wait before progressing @@ -697,7 +697,7 @@ public void testTriggerWithHeaders() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers); Client client = mock(Client.class); doAnswer(invocationOnMock -> { - RefreshResponse r = new RefreshResponse(2, 2, 0, Collections.emptyList()); + BroadcastResponse r = new BroadcastResponse(2, 2, 0, Collections.emptyList()); ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -717,10 +717,10 @@ public void testTriggerWithHeaders() throws Exception { SearchResponse r = mock(SearchResponse.class); when(r.getShardFailures()).thenReturn(ShardSearchFailure.EMPTY_ARRAY); - CompositeAggregation compositeAgg = mock(CompositeAggregation.class); + InternalComposite compositeAgg = mock(InternalComposite.class); when(compositeAgg.getBuckets()).thenReturn(Collections.emptyList()); when(compositeAgg.getName()).thenReturn(RollupField.NAME); - Aggregations aggs = new Aggregations(Collections.singletonList(compositeAgg)); + InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(compositeAgg)); when(r.getAggregations()).thenReturn(aggs); // Wait before progressing @@ -806,7 +806,7 @@ public void testSaveStateChangesIDScheme() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers); Client client = mock(Client.class); doAnswer(invocationOnMock -> { - RefreshResponse r = new RefreshResponse(2, 2, 0, Collections.emptyList()); + BroadcastResponse 
r = new BroadcastResponse(2, 2, 0, Collections.emptyList()); ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -827,10 +827,10 @@ public void testSaveStateChangesIDScheme() throws Exception { SearchResponse r = mock(SearchResponse.class); when(r.getShardFailures()).thenReturn(ShardSearchFailure.EMPTY_ARRAY); - CompositeAggregation compositeAgg = mock(CompositeAggregation.class); + InternalComposite compositeAgg = mock(InternalComposite.class); when(compositeAgg.getBuckets()).thenReturn(Collections.emptyList()); when(compositeAgg.getName()).thenReturn(RollupField.NAME); - Aggregations aggs = new Aggregations(Collections.singletonList(compositeAgg)); + InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(compositeAgg)); when(r.getAggregations()).thenReturn(aggs); // Wait before progressing diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java index 615261a302877..45f1fc4939bc1 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java @@ -57,7 +57,7 @@ public class PinnedQueryBuilder extends AbstractQueryBuilder public static final ParseField DOCS_FIELD = new ParseField("docs"); public static final ParseField ORGANIC_QUERY_FIELD = new ParseField("organic"); - private static final TransportVersion OPTIONAL_INDEX_IN_DOCS_VERSION = TransportVersions.PINNED_QUERY_OPTIONAL_INDEX; + private static final TransportVersion OPTIONAL_INDEX_IN_DOCS_VERSION = TransportVersions.V_8_11_X; private final List ids; private final List docs; diff --git 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java index 7ee81b444af46..d5fcf0853cdae 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.Metadata; @@ -25,13 +27,11 @@ import org.elasticsearch.license.PostStartTrialRequest; import org.elasticsearch.license.PostStartTrialResponse; import org.elasticsearch.license.TransportDeleteLicenseAction; -import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotAction; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheAction; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheRequest; -import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheResponse; import 
org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsAction; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsRequest; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsResponse; @@ -78,7 +78,7 @@ public void createAndMountSearchableSnapshot() throws Exception { assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); ensureGreen(indexName); - assertAcked(client().execute(TransportDeleteLicenseAction.TYPE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(TransportDeleteLicenseAction.TYPE, new AcknowledgedRequest.Plain()).get()); assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); ensureClusterSizeConsistency(); @@ -121,11 +121,11 @@ public void testStatsRequiresLicense() throws ExecutionException, InterruptedExc } public void testClearCacheRequiresLicense() throws ExecutionException, InterruptedException { - final ActionFuture future = client().execute( + final ActionFuture future = client().execute( ClearSearchableSnapshotsCacheAction.INSTANCE, new ClearSearchableSnapshotsCacheRequest(indexName) ); - final ClearSearchableSnapshotsCacheResponse response = future.get(); + final BroadcastResponse response = future.get(); assertThat(response.getTotalShards(), greaterThan(0)); assertThat(response.getSuccessfulShards(), equalTo(0)); for (DefaultShardOperationFailedException shardFailure : response.getShardFailures()) { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotDiskThresholdIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotDiskThresholdIntegTests.java index 8d115d0f19580..e4f9d530e83df 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotDiskThresholdIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotDiskThresholdIntegTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.blobcache.shared.SharedBlobCacheService; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterInfoServiceUtils; +import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.DiskUsageIntegTestCase; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.metadata.RepositoryMetadata; @@ -232,10 +233,8 @@ public void testHighWatermarkCanNotBeExceededOnColdNode() throws Exception { final var masterInfoService = (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance(ClusterInfoService.class); ClusterInfoServiceUtils.refresh(masterInfoService); - assertThat( - masterInfoService.getClusterInfo().getNodeMostAvailableDiskUsages().get(otherDataNodeId).getTotalBytes(), - equalTo(totalSpace) - ); + DiskUsage usage = masterInfoService.getClusterInfo().getNodeMostAvailableDiskUsages().get(otherDataNodeId); + assertThat(usage.totalBytes(), equalTo(totalSpace)); mountIndices(indicesStoresSizes.keySet(), "mounted-", repositoryName, snapshot, storage); @@ -309,10 +308,8 @@ public void testHighWatermarkCanNotBeExceededWithInitializingSearchableSnapshots ClusterInfoService.class ); ClusterInfoServiceUtils.refresh(masterInfoService); - assertThat( - masterInfoService.getClusterInfo().getNodeMostAvailableDiskUsages().get(coldNodeId).getTotalBytes(), - equalTo(totalSpace) - ); + DiskUsage usage = masterInfoService.getClusterInfo().getNodeMostAvailableDiskUsages().get(coldNodeId); + assertThat(usage.totalBytes(), equalTo(totalSpace)); String prefix = "mounted-"; 
mountIndices(indicesToBeMounted.keySet(), prefix, repositoryName, snapshotName, FULL_COPY); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java index 37b3ecfd36959..c1c40acbd43c5 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java @@ -8,10 +8,9 @@ package org.elasticsearch.xpack.searchablesnapshots.cache.blob; import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.blobcache.shared.SharedBlobCacheService; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; @@ -139,7 +138,7 @@ public void testBlobStoreCache() throws Exception { if (randomBoolean()) { logger.info("--> force-merging index before snapshotting"); - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).get(); + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).get(); assertThat(forceMergeResponse.getSuccessfulShards(), equalTo(numberOfShards.totalNumShards)); 
assertThat(forceMergeResponse.getFailedShards(), equalTo(0)); } @@ -355,7 +354,7 @@ private Client systemClient() { private void refreshSystemIndex() { try { - final RefreshResponse refreshResponse = systemClient().admin().indices().prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX).get(); + final BroadcastResponse refreshResponse = systemClient().admin().indices().prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX).get(); assertThat(refreshResponse.getSuccessfulShards(), greaterThan(0)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); } catch (IndexNotFoundException indexNotFoundException) { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 981ffe2832e66..56074f97650f0 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -10,9 +10,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.blobcache.common.ByteRange; import org.elasticsearch.client.internal.Client; import 
org.elasticsearch.client.internal.OriginSettingClient; @@ -329,7 +329,7 @@ private long numberOfEntriesInCache() { private void refreshSystemIndex(boolean failIfNotExist) { try { - final RefreshResponse refreshResponse = systemClient().admin() + final BroadcastResponse refreshResponse = systemClient().admin() .indices() .prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(failIfNotExist ? RefreshRequest.DEFAULT_INDICES_OPTIONS : IndicesOptions.LENIENT_EXPAND_OPEN) diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java index 42ac63579b6c6..b260f6cf2a891 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.searchablesnapshots.cache.shared; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.blobcache.shared.SharedBlobCacheService; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.ShardRouting; @@ -22,7 +23,6 @@ import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheAction; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheRequest; -import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheResponse; import org.elasticsearch.xpack.searchablesnapshots.action.cache.TransportSearchableSnapshotsNodeCachesStatsAction; import 
org.elasticsearch.xpack.searchablesnapshots.action.cache.TransportSearchableSnapshotsNodeCachesStatsAction.NodeCachesStatsResponse; import org.elasticsearch.xpack.searchablesnapshots.action.cache.TransportSearchableSnapshotsNodeCachesStatsAction.NodesCachesStatsResponse; @@ -117,7 +117,7 @@ public void testNodesCachesStats() throws Exception { assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); - final ClearSearchableSnapshotsCacheResponse clearCacheResponse = client().execute( + final BroadcastResponse clearCacheResponse = client().execute( ClearSearchableSnapshotsCacheAction.INSTANCE, new ClearSearchableSnapshotsCacheRequest(mountedIndex) ).actionGet(); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index 83a38a4d0b328..b08f31083c973 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -500,6 +501,7 @@ public Optional getEngineFactory(IndexSettings indexSettings) { public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java index 9628bc75cd337..897eac64a79fb 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java @@ -7,13 +7,14 @@ package org.elasticsearch.xpack.searchablesnapshots.action; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class ClearSearchableSnapshotsCacheAction extends ActionType { +public class ClearSearchableSnapshotsCacheAction extends ActionType { public static final ClearSearchableSnapshotsCacheAction INSTANCE = new ClearSearchableSnapshotsCacheAction(); static final String NAME = "cluster:admin/xpack/searchable_snapshots/cache/clear"; private ClearSearchableSnapshotsCacheAction() { - super(NAME, ClearSearchableSnapshotsCacheResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheResponse.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheResponse.java deleted file mode 100644 index 23a566f23d71b..0000000000000 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheResponse.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.searchablesnapshots.action; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; -import java.util.List; - -public class ClearSearchableSnapshotsCacheResponse extends BroadcastResponse { - - ClearSearchableSnapshotsCacheResponse(StreamInput in) throws IOException { - super(in); - } - - ClearSearchableSnapshotsCacheResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } -} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/SearchableSnapshotsStatsAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/SearchableSnapshotsStatsAction.java index 362da39a00cae..c380fa65e1870 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/SearchableSnapshotsStatsAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/SearchableSnapshotsStatsAction.java @@ -14,6 +14,6 @@ public class SearchableSnapshotsStatsAction extends ActionType { @Inject @@ -56,11 +57,11 @@ protected EmptyResult readShardResult(StreamInput in) { } @Override - protected ResponseFactory getResponseFactory( + protected ResponseFactory getResponseFactory( ClearSearchableSnapshotsCacheRequest request, ClusterState clusterState ) { - return (totalShards, successfulShards, failedShards, emptyResults, shardFailures) -> new ClearSearchableSnapshotsCacheResponse( + return (totalShards, successfulShards, failedShards, 
emptyResults, shardFailures) -> new BroadcastResponse( totalShards, successfulShards, failedShards, diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoAction.java index 42e5922f46ab0..cc99e9514df67 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoAction.java @@ -31,7 +31,7 @@ public class FrozenCacheInfoAction extends ActionType { public static final FrozenCacheInfoAction INSTANCE = new FrozenCacheInfoAction(); private FrozenCacheInfoAction() { - super(NAME, FrozenCacheInfoResponse::new); + super(NAME); } public static class Request extends ActionRequest { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoNodeAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoNodeAction.java index bb34940628d23..756f9c191a561 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoNodeAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/FrozenCacheInfoNodeAction.java @@ -30,7 +30,7 @@ public class FrozenCacheInfoNodeAction extends ActionType TYPE = new ActionType<>(ACTION_NAME, Writeable.Reader.localOnly()); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private final CacheService cacheService; diff --git 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/TransportSearchableSnapshotsNodeCachesStatsAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/TransportSearchableSnapshotsNodeCachesStatsAction.java index c192d5bff8eb9..9a40b39083139 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/TransportSearchableSnapshotsNodeCachesStatsAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/TransportSearchableSnapshotsNodeCachesStatsAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; @@ -52,7 +51,7 @@ public class TransportSearchableSnapshotsNodeCachesStatsAction extends Transport public static final String ACTION_NAME = "cluster:admin/xpack/searchable_snapshots/cache/stats"; - public static final ActionType TYPE = new ActionType<>(ACTION_NAME, Writeable.Reader.localOnly()); + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private final Supplier> frozenCacheService; private final XPackLicenseState licenseState; diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java index a599aeaeada71..5f083d568fed8 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java +++ 
b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.lucene.store.ESIndexInputTestCase; @@ -72,6 +71,7 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLengthBetween; import static org.elasticsearch.blobcache.shared.SharedBytes.PAGE_SIZE; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.xpack.searchablesnapshots.cache.common.TestUtils.pageAligned; import static org.elasticsearch.xpack.searchablesnapshots.cache.common.TestUtils.randomPopulateAndReads; @@ -237,18 +237,19 @@ private static ByteSizeValue pageAlignedBetween(ByteSizeValue min, ByteSizeValue } protected static SearchableSnapshotRecoveryState createRecoveryState(boolean finalizedDone) { - ShardRouting shardRouting = TestShardRouting.newShardRouting( + ShardRouting shardRouting = shardRoutingBuilder( new ShardId(randomAlphaOfLength(10), randomAlphaOfLength(10), 0), randomAlphaOfLength(10), true, - ShardRoutingState.INITIALIZING, + ShardRoutingState.INITIALIZING + ).withRecoverySource( new RecoverySource.SnapshotRecoverySource( UUIDs.randomBase64UUID(), new Snapshot("repo", new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())), IndexVersion.current(), new IndexId("some_index", UUIDs.randomBase64UUID(random())) ) - ); + ).build(); DiscoveryNode targetNode = DiscoveryNodeUtils.create("local"); SearchableSnapshotRecoveryState recoveryState = new SearchableSnapshotRecoveryState(shardRouting, targetNode, null); diff --git 
a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryStatsTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryStatsTests.java index 90101eca2573a..a39031037eacc 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryStatsTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryStatsTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; @@ -52,6 +51,7 @@ import java.util.function.LongSupplier; import static org.elasticsearch.blobcache.BlobCacheUtils.toIntBytes; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_CACHE_ENABLED_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_CACHE_PREWARM_ENABLED_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_UNCACHED_CHUNK_SIZE_SETTING; @@ -680,18 +680,19 @@ protected IndexInputStats createIndexInputStats(long numFiles, long totalSize, l cacheService.start(); assertThat(directory.getStats(fileName), nullValue()); - ShardRouting shardRouting = TestShardRouting.newShardRouting( + ShardRouting shardRouting = shardRoutingBuilder( new ShardId(randomAlphaOfLength(10), randomAlphaOfLength(10), 0), randomAlphaOfLength(10), true, - 
ShardRoutingState.INITIALIZING, + ShardRoutingState.INITIALIZING + ).withRecoverySource( new RecoverySource.SnapshotRecoverySource( UUIDs.randomBase64UUID(), new Snapshot("repo", new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())), IndexVersion.current(), new IndexId("some_index", UUIDs.randomBase64UUID(random())) ) - ); + ).build(); DiscoveryNode targetNode = DiscoveryNodeUtils.create("local"); RecoveryState recoveryState = new SearchableSnapshotRecoveryState(shardRouting, targetNode, null); final PlainActionFuture future = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index acb802743586c..08496060f431b 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -31,8 +31,10 @@ dependencies { testImplementation project(path: ':modules:analysis-common') testImplementation project(path: ':modules:reindex') testImplementation project(':modules:data-streams') + testImplementation project(':modules:lang-mustache') + testImplementation project(':modules:mapper-extras') + testImplementation project(':modules:parent-join') testImplementation project(':modules:rest-root') - testImplementation project(":client:rest-high-level") testImplementation(testArtifact(project(xpackModule('core')))) internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java index d05c3c85cd07f..2aa96ffc4e443 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java +++ 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java @@ -21,6 +21,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.MutableSettingsProvider; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; @@ -36,8 +37,11 @@ import java.util.Locale; import java.util.Map; +import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public abstract class AbstractRemoteClusterSecurityTestCase extends ESRestTestCase { @@ -49,8 +53,10 @@ public abstract class AbstractRemoteClusterSecurityTestCase extends ESRestTestCa protected static final String REMOTE_TRANSFORM_USER = "remote_transform_user"; protected static final String REMOTE_SEARCH_ROLE = "remote_search"; protected static final String REMOTE_CLUSTER_ALIAS = "my_remote_cluster"; + private static final String KEYSTORE_PASSWORD = "keystore-password"; protected static LocalClusterConfigProvider commonClusterConfig = cluster -> cluster.module("analysis-common") + .keystorePassword(KEYSTORE_PASSWORD) .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "true") .setting("xpack.security.authc.token.enabled", "true") @@ -191,21 +197,72 @@ protected void configureRemoteCluster( boolean skipUnavailable ) throws Exception { // For configurable remote cluster security, this method assumes the cross cluster access API key is already configured in keystore + putRemoteClusterSettings(clusterAlias, targetFulfillingCluster, basicSecurity, isProxyMode, 
skipUnavailable); + + // Ensure remote cluster is connected + checkRemoteConnection(clusterAlias, targetFulfillingCluster, basicSecurity, isProxyMode); + } + + protected void configureRemoteClusterCredentials(String clusterAlias, String credentials, MutableSettingsProvider keystoreSettings) + throws IOException { + keystoreSettings.put("cluster.remote." + clusterAlias + ".credentials", credentials); + queryCluster.updateStoredSecureSettings(); + reloadSecureSettings(); + } + + protected void removeRemoteClusterCredentials(String clusterAlias, MutableSettingsProvider keystoreSettings) throws IOException { + keystoreSettings.remove("cluster.remote." + clusterAlias + ".credentials"); + queryCluster.updateStoredSecureSettings(); + reloadSecureSettings(); + } + + @SuppressWarnings("unchecked") + private void reloadSecureSettings() throws IOException { + final Request request = new Request("POST", "/_nodes/reload_secure_settings"); + request.setJsonEntity("{\"secure_settings_password\":\"" + KEYSTORE_PASSWORD + "\"}"); + final Response reloadResponse = adminClient().performRequest(request); + assertOK(reloadResponse); + final Map map = entityAsMap(reloadResponse); + assertThat(map.get("nodes"), instanceOf(Map.class)); + final Map nodes = (Map) map.get("nodes"); + assertThat(nodes, is(not(anEmptyMap()))); + for (Map.Entry entry : nodes.entrySet()) { + assertThat(entry.getValue(), instanceOf(Map.class)); + final Map node = (Map) entry.getValue(); + assertThat(node.get("reload_exception"), nullValue()); + } + } + + protected void putRemoteClusterSettings( + String clusterAlias, + ElasticsearchCluster targetFulfillingCluster, + boolean basicSecurity, + boolean isProxyMode, + boolean skipUnavailable + ) throws IOException { final Settings.Builder builder = Settings.builder(); final String remoteClusterEndpoint = basicSecurity ? 
targetFulfillingCluster.getTransportEndpoint(0) : targetFulfillingCluster.getRemoteClusterServerEndpoint(0); if (isProxyMode) { builder.put("cluster.remote." + clusterAlias + ".mode", "proxy") - .put("cluster.remote." + clusterAlias + ".proxy_address", remoteClusterEndpoint); + .put("cluster.remote." + clusterAlias + ".proxy_address", remoteClusterEndpoint) + .putNull("cluster.remote." + clusterAlias + ".seeds"); } else { builder.put("cluster.remote." + clusterAlias + ".mode", "sniff") - .putList("cluster.remote." + clusterAlias + ".seeds", remoteClusterEndpoint); + .putList("cluster.remote." + clusterAlias + ".seeds", remoteClusterEndpoint) + .putNull("cluster.remote." + clusterAlias + ".proxy_address"); } builder.put("cluster.remote." + clusterAlias + ".skip_unavailable", skipUnavailable); updateClusterSettings(builder.build()); + } - // Ensure remote cluster is connected + protected void checkRemoteConnection( + String clusterAlias, + ElasticsearchCluster targetFulfillingCluster, + boolean basicSecurity, + boolean isProxyMode + ) throws Exception { final Request remoteInfoRequest = new Request("GET", "/_remote/info"); assertBusy(() -> { final Response remoteInfoResponse = adminClient().performRequest(remoteInfoRequest); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java index 90e3d9e42b744..fbdc5782a7ddf 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java @@ -162,7 +162,7 @@ public void testRcs1Setup() throws Exception { // First migrate to RCS 2.0 @Order(30) - public void 
testFollowerClusterRestartForRcs2() throws IOException { + public void testFollowerClusterCredentialsChangeForRcs2() throws IOException { // Update the ccr_user_role so that it is sufficient for both RCS 1.0 and 2.0 final Request putRoleRequest = new Request("POST", "/_security/role/" + CCR_USER_ROLE); putRoleRequest.setJsonEntity(""" @@ -202,9 +202,7 @@ public void testFollowerClusterRestartForRcs2() throws IOException { } ] }"""); - keystoreSettings.put("cluster.remote.my_remote_cluster.credentials", (String) crossClusterAccessApiKey.get("encoded")); - queryCluster.restart(false); - closeClients(); + configureRemoteClusterCredentials("my_remote_cluster", (String) crossClusterAccessApiKey.get("encoded"), keystoreSettings); } @Order(40) @@ -239,7 +237,7 @@ public void testRcs2Setup() throws Exception { // Second migrate back to RCS 1.0 @Order(50) - public void testFollowerClusterRestartAgainForRcs1() throws IOException { + public void testFollowerClusterCredentialsChangeForRcs1() throws IOException { // Remove the RCS 2.0 remote cluster removeRemoteCluster(); @@ -266,9 +264,7 @@ public void testFollowerClusterRestartAgainForRcs1() throws IOException { indexDocsToLeaderCluster("metrics-004", 1); // Remove remote cluster credentials to revert back to RCS 1.0 - keystoreSettings.remove("cluster.remote.my_remote_cluster.credentials"); - queryCluster.restart(false); - closeClients(); + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); } @Order(60) @@ -373,7 +369,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr } finally { searchResponse.decRef(); } - }, 30, TimeUnit.SECONDS); + }, 60, TimeUnit.SECONDS); } private void assertFollowerInfo(String followIndexName, String leaderClusterName, String leadIndexName, String status) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityFcActionAuthorizationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityFcActionAuthorizationIT.java index 8b54bd060fa19..2dbb716e8cfa6 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityFcActionAuthorizationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityFcActionAuthorizationIT.java @@ -8,14 +8,19 @@ package org.elasticsearch.xpack.remotecluster; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.node.VersionInformation; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.MockSecureSettings; @@ -50,6 +55,7 @@ import 
java.io.IOException; import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.remotecluster.AbstractRemoteClusterSecurityTestCase.PASS; @@ -99,7 +105,27 @@ public void tearDown() throws Exception { ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); } - public void testIndicesPrivilegesAreEnforcedForCcrRestoreSessionActions() throws IOException { + private static Response executeRemote( + RemoteClusterClient client, + RemoteClusterActionType action, + Request request + ) throws Exception { + final var future = new PlainActionFuture(); + client.execute(action, request, future); + try { + return future.get(10, TimeUnit.SECONDS); + } catch (ExecutionException e) { + if (ExceptionsHelper.unwrapCause(e.getCause()) instanceof Exception cause) { + throw cause; + } + + throw new AssertionError(e); + } catch (Exception e) { + throw new AssertionError(e); + } + } + + public void testIndicesPrivilegesAreEnforcedForCcrRestoreSessionActions() throws Exception { final Map crossClusterApiKeyMap = createCrossClusterAccessApiKey(adminClient(), """ { "replication": [ @@ -141,18 +167,14 @@ public void testIndicesPrivilegesAreEnforcedForCcrRestoreSessionActions() throws assertThat(remoteConnectionInfos, hasSize(1)); assertThat(remoteConnectionInfos.get(0).isConnected(), is(true)); - final Client remoteClusterClient = remoteClusterService.getRemoteClusterClient( - threadPool, - "my_remote_cluster", - threadPool.generic() - ); + final var remoteClusterClient = remoteClusterService.getRemoteClusterClient("my_remote_cluster", threadPool.generic()); // Creating a restore session fails if index is not accessible final ShardId privateShardId = new ShardId("private-index", privateIndexUUID, 0); final PutCcrRestoreSessionRequest request = new PutCcrRestoreSessionRequest(UUIDs.randomBase64UUID(), privateShardId); final ElasticsearchSecurityException e = expectThrows( 
ElasticsearchSecurityException.class, - () -> remoteClusterClient.execute(PutCcrRestoreSessionAction.INSTANCE, request).actionGet() + () -> executeRemote(remoteClusterClient, PutCcrRestoreSessionAction.REMOTE_TYPE, request) ); assertThat( e.getMessage(), @@ -169,30 +191,33 @@ public void testIndicesPrivilegesAreEnforcedForCcrRestoreSessionActions() throws final String sessionUUID1 = UUIDs.randomBase64UUID(); final ShardId shardId1 = new ShardId("leader-index-1", leaderIndex1UUID, 0); final PutCcrRestoreSessionRequest request1 = new PutCcrRestoreSessionRequest(sessionUUID1, shardId1); - final PutCcrRestoreSessionAction.PutCcrRestoreSessionResponse response1 = remoteClusterClient.execute( - PutCcrRestoreSessionAction.INSTANCE, + final PutCcrRestoreSessionAction.PutCcrRestoreSessionResponse response1 = executeRemote( + remoteClusterClient, + PutCcrRestoreSessionAction.REMOTE_TYPE, request1 - ).actionGet(); + ); assertThat(response1.getStoreFileMetadata().fileMetadataMap().keySet(), hasSize(greaterThanOrEqualTo(1))); final String leaderIndex1FileName = response1.getStoreFileMetadata().fileMetadataMap().keySet().iterator().next(); final String sessionUUID2 = UUIDs.randomBase64UUID(); final ShardId shardId2 = new ShardId("leader-index-2", leaderIndex2UUID, 0); final PutCcrRestoreSessionRequest request2 = new PutCcrRestoreSessionRequest(sessionUUID2, shardId2); - final PutCcrRestoreSessionAction.PutCcrRestoreSessionResponse response2 = remoteClusterClient.execute( - PutCcrRestoreSessionAction.INSTANCE, + final PutCcrRestoreSessionAction.PutCcrRestoreSessionResponse response2 = executeRemote( + remoteClusterClient, + PutCcrRestoreSessionAction.REMOTE_TYPE, request2 - ).actionGet(); + ); assertThat(response2.getStoreFileMetadata().fileMetadataMap().keySet(), hasSize(greaterThanOrEqualTo(1))); final String leaderIndex2FileName = response2.getStoreFileMetadata().fileMetadataMap().keySet().iterator().next(); // Get file chuck fails if requested index is not authorized final 
var e1 = expectThrows( ElasticsearchSecurityException.class, - () -> remoteClusterClient.execute( - GetCcrRestoreFileChunkAction.INSTANCE, + () -> executeRemote( + remoteClusterClient, + GetCcrRestoreFileChunkAction.REMOTE_TYPE, new GetCcrRestoreFileChunkRequest(response1.getNode(), sessionUUID1, leaderIndex1FileName, 1, privateShardId) - ).actionGet() + ) ); assertThat( e1.getMessage(), @@ -202,18 +227,20 @@ public void testIndicesPrivilegesAreEnforcedForCcrRestoreSessionActions() throws // Get file chunk fails if requested index does not match session index final var e2 = expectThrows( IllegalArgumentException.class, - () -> remoteClusterClient.execute( - GetCcrRestoreFileChunkAction.INSTANCE, + () -> executeRemote( + remoteClusterClient, + GetCcrRestoreFileChunkAction.REMOTE_TYPE, new GetCcrRestoreFileChunkRequest(response1.getNode(), sessionUUID1, leaderIndex1FileName, 1, shardId2) - ).actionGet() + ) ); assertThat(e2.getMessage(), containsString("does not match requested shardId")); // Get file chunk fails if requested file is not part of the session final var e3 = expectThrows( IllegalArgumentException.class, - () -> remoteClusterClient.execute( - GetCcrRestoreFileChunkAction.INSTANCE, + () -> executeRemote( + remoteClusterClient, + GetCcrRestoreFileChunkAction.REMOTE_TYPE, new GetCcrRestoreFileChunkRequest( response1.getNode(), sessionUUID1, @@ -221,24 +248,26 @@ public void testIndicesPrivilegesAreEnforcedForCcrRestoreSessionActions() throws 1, shardId1 ) - ).actionGet() + ) ); assertThat(e3.getMessage(), containsString("invalid file name")); // Get file chunk succeeds - final GetCcrRestoreFileChunkAction.GetCcrRestoreFileChunkResponse getChunkResponse = remoteClusterClient.execute( - GetCcrRestoreFileChunkAction.INSTANCE, + final GetCcrRestoreFileChunkAction.GetCcrRestoreFileChunkResponse getChunkResponse = executeRemote( + remoteClusterClient, + GetCcrRestoreFileChunkAction.REMOTE_TYPE, new GetCcrRestoreFileChunkRequest(response2.getNode(), sessionUUID2, 
leaderIndex2FileName, 1, shardId2) - ).actionGet(); + ); assertThat(getChunkResponse.getChunk().length(), equalTo(1)); // Clear restore session fails if index is unauthorized final var e4 = expectThrows( ElasticsearchSecurityException.class, - () -> remoteClusterClient.execute( - ClearCcrRestoreSessionAction.INSTANCE, + () -> executeRemote( + remoteClusterClient, + ClearCcrRestoreSessionAction.REMOTE_TYPE, new ClearCcrRestoreSessionRequest(sessionUUID1, response1.getNode(), privateShardId) - ).actionGet() + ) ); assertThat( e4.getMessage(), @@ -248,22 +277,25 @@ public void testIndicesPrivilegesAreEnforcedForCcrRestoreSessionActions() throws // Clear restore session fails if requested index does not match session index final var e5 = expectThrows( IllegalArgumentException.class, - () -> remoteClusterClient.execute( - ClearCcrRestoreSessionAction.INSTANCE, + () -> executeRemote( + remoteClusterClient, + ClearCcrRestoreSessionAction.REMOTE_TYPE, new ClearCcrRestoreSessionRequest(sessionUUID1, response1.getNode(), shardId2) - ).actionGet() + ) ); assertThat(e5.getMessage(), containsString("does not match requested shardId")); // Clear restore sessions succeed - remoteClusterClient.execute( - ClearCcrRestoreSessionAction.INSTANCE, + executeRemote( + remoteClusterClient, + ClearCcrRestoreSessionAction.REMOTE_TYPE, new ClearCcrRestoreSessionRequest(sessionUUID1, response1.getNode(), shardId1) - ).actionGet(); - remoteClusterClient.execute( - ClearCcrRestoreSessionAction.INSTANCE, + ); + executeRemote( + remoteClusterClient, + ClearCcrRestoreSessionAction.REMOTE_TYPE, new ClearCcrRestoreSessionRequest(sessionUUID2, response2.getNode(), shardId2) - ).actionGet(); + ); } } @@ -278,18 +310,18 @@ public void testRestApiKeyIsNotAllowedOnRemoteClusterPort() throws IOException { final Map apiKeyMap = responseAsMap(createApiKeyResponse); try (MockTransportService service = startTransport("node", threadPool, (String) apiKeyMap.get("encoded"))) { final RemoteClusterService 
remoteClusterService = service.getRemoteClusterService(); - final Client remoteClusterClient = remoteClusterService.getRemoteClusterClient( - threadPool, + final var remoteClusterClient = remoteClusterService.getRemoteClusterClient( "my_remote_cluster", EsExecutors.DIRECT_EXECUTOR_SERVICE ); final ElasticsearchSecurityException e = expectThrows( ElasticsearchSecurityException.class, - () -> remoteClusterClient.execute( - RemoteClusterNodesAction.TYPE, + () -> executeRemote( + remoteClusterClient, + RemoteClusterNodesAction.REMOTE_TYPE, RemoteClusterNodesAction.Request.REMOTE_CLUSTER_SERVER_NODES - ).actionGet() + ) ); assertThat( e.getMessage(), @@ -300,7 +332,7 @@ public void testRestApiKeyIsNotAllowedOnRemoteClusterPort() throws IOException { } } - public void testUpdateCrossClusterApiKey() throws IOException { + public void testUpdateCrossClusterApiKey() throws Exception { final Map crossClusterApiKeyMap = createCrossClusterAccessApiKey(adminClient(), """ { "search": [ @@ -351,8 +383,7 @@ public void testUpdateCrossClusterApiKey() throws IOException { final List remoteConnectionInfos = remoteClusterService.getRemoteConnectionInfos().toList(); assertThat(remoteConnectionInfos, hasSize(1)); assertThat(remoteConnectionInfos.get(0).isConnected(), is(true)); - final Client remoteClusterClient = remoteClusterService.getRemoteClusterClient( - threadPool, + final var remoteClusterClient = remoteClusterService.getRemoteClusterClient( "my_remote_cluster", EsExecutors.DIRECT_EXECUTOR_SERVICE ); @@ -360,7 +391,7 @@ public void testUpdateCrossClusterApiKey() throws IOException { // 1. 
Not accessible because API key does not grant the access final ElasticsearchSecurityException e1 = expectThrows( ElasticsearchSecurityException.class, - () -> remoteClusterClient.execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet() + () -> executeRemote(remoteClusterClient, TransportFieldCapabilitiesAction.REMOTE_TYPE, request) ); assertThat( e1.getMessage(), @@ -386,10 +417,11 @@ public void testUpdateCrossClusterApiKey() throws IOException { } }"""); assertOK(performRequestWithAdminUser(adminClient(), updateApiKeyRequest)); - final FieldCapabilitiesResponse fieldCapabilitiesResponse = remoteClusterClient.execute( - TransportFieldCapabilitiesAction.TYPE, + final FieldCapabilitiesResponse fieldCapabilitiesResponse = executeRemote( + remoteClusterClient, + TransportFieldCapabilitiesAction.REMOTE_TYPE, request - ).actionGet(); + ); assertThat(fieldCapabilitiesResponse.getIndices(), arrayContaining("index")); // 3. Update the API key again to remove access @@ -407,7 +439,7 @@ public void testUpdateCrossClusterApiKey() throws IOException { assertOK(performRequestWithAdminUser(adminClient(), updateApiKeyRequest)); final ElasticsearchSecurityException e2 = expectThrows( ElasticsearchSecurityException.class, - () -> remoteClusterClient.execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet() + () -> executeRemote(remoteClusterClient, TransportFieldCapabilitiesAction.REMOTE_TYPE, request) ); assertThat( e2.getMessage(), diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java new file mode 100644 index 0000000000000..478d3c76f4b36 --- /dev/null +++ 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.remotecluster; + +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + +import org.apache.lucene.tests.util.TimeUnits; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.MutableSettingsProvider; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +// account for slow stored secure settings updates (involves removing and re-creating the keystore) +@TimeoutSuite(millis = 10 * TimeUnits.MINUTE) +public class RemoteClusterSecurityReloadCredentialsRestIT extends AbstractRemoteClusterSecurityTestCase { + + private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); + + static { + fulfillingCluster = ElasticsearchCluster.local() + .name("fulfilling-cluster") + .apply(commonClusterConfig) + .setting("remote_cluster_server.enabled", 
"true") + .setting("remote_cluster.port", "0") + .setting("xpack.security.remote_cluster_server.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key") + .setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt") + .keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password") + .build(); + + queryCluster = ElasticsearchCluster.local() + .name("query-cluster") + .apply(commonClusterConfig) + .setting("xpack.security.remote_cluster_client.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt") + .keystore(keystoreSettings) + .settings((ignored) -> { + // Use an alternative cluster alias to test credential setup when remote cluster settings are configured in + // elasticsearch.yml + final Map settings = new HashMap<>(); + final String remoteClusterEndpoint = fulfillingCluster.getRemoteClusterServerEndpoint(0); + final boolean isProxyMode = randomBoolean(); + final String clusterAlias = "my_aliased_remote_cluster"; + if (isProxyMode) { + settings.put("cluster.remote." + clusterAlias + ".mode", "proxy"); + settings.put("cluster.remote." + clusterAlias + ".proxy_address", "\"" + remoteClusterEndpoint + "\""); + } else { + settings.put("cluster.remote." + clusterAlias + ".mode", "sniff"); + settings.put("cluster.remote." 
+ clusterAlias + ".seeds", "[\"" + remoteClusterEndpoint + "\"]"); + } + return settings; + }) + .rolesFile(Resource.fromClasspath("roles.yml")) + .user(REMOTE_SEARCH_USER, PASS.toString(), "read_remote_shared_logs", false) + .build(); + } + + @ClassRule + // Use a RuleChain to ensure that fulfilling cluster is started before query cluster + public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster); + + @Before + public void setup() throws IOException { + indexDocumentsOnFulfillingCluster(); + } + + @After + public void cleanUp() throws IOException { + removeRemoteCluster(); + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); + } + + public void testFirstTimeSetupWithElasticsearchSettings() throws Exception { + final Map apiKeyMap = createCrossClusterAccessApiKey(""" + { + "search": [ + { + "names": ["*"] + } + ] + }"""); + configureRemoteClusterCredentials("my_aliased_remote_cluster", (String) apiKeyMap.get("encoded"), keystoreSettings); + assertSharedLogsSearchSuccess("my_aliased_remote_cluster"); + removeRemoteClusterCredentials("my_aliased_remote_cluster", keystoreSettings); + } + + public void testFirstTimeSetup() throws Exception { + configureRcs2(); + assertSharedLogsSearchSuccess("my_remote_cluster"); + } + + public void testUpgradeFromRcs1() throws Exception { + // Setup RCS 1.0 and check that it works + configureRemoteCluster("my_remote_cluster", fulfillingCluster, true, randomBoolean(), randomBoolean()); + final Request putRoleRequest = new Request("POST", "/_security/role/read_remote_shared_logs"); + putRoleRequest.setJsonEntity(""" + { + "indices": [ + { + "names": [ "shared-logs" ], + "privileges": [ "read", "read_cross_cluster" ] + } + ] + }"""); + performRequestAgainstFulfillingCluster(putRoleRequest); + assertSharedLogsSearchSuccess("my_remote_cluster"); + + // Now migrate to RCS 2.0 + // Optionally remove existing cluster definition first. 
In practice removing the cluster definition first is the recommended + // approach since otherwise the reload-secure-settings call may result in WARN logs, but it's functionally possible not to + // remove the definition + if (randomBoolean()) { + removeRemoteCluster(); + } + configureRcs2(); + assertSharedLogsSearchSuccess("my_remote_cluster"); + } + + public void testDowngradeToRcs1() throws Exception { + configureRcs2(); + assertSharedLogsSearchSuccess("my_remote_cluster"); + + if (randomBoolean()) { + removeRemoteCluster(); + } + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); + configureRemoteCluster("my_remote_cluster", fulfillingCluster, true, randomBoolean(), randomBoolean()); + final Request putRoleRequest = new Request("POST", "/_security/role/read_remote_shared_logs"); + putRoleRequest.setJsonEntity(""" + { + "indices": [ + { + "names": [ "shared-logs" ], + "privileges": [ "read", "read_cross_cluster" ] + } + ] + }"""); + performRequestAgainstFulfillingCluster(putRoleRequest); + assertSharedLogsSearchSuccess("my_remote_cluster"); + } + + private void removeRemoteCluster() throws IOException { + updateClusterSettings( + Settings.builder() + .putNull("cluster.remote.my_remote_cluster.mode") + .putNull("cluster.remote.my_remote_cluster.skip_unavailable") + .putNull("cluster.remote.my_remote_cluster.proxy_address") + .putNull("cluster.remote.my_remote_cluster.seeds") + .build() + ); + } + + private void configureRcs2() throws Exception { + final Map apiKeyMap = createCrossClusterAccessApiKey(""" + { + "search": [ + { + "names": ["*"] + } + ] + }"""); + final String remoteClusterCredentials = (String) apiKeyMap.get("encoded"); + + final boolean isProxyMode = randomBoolean(); + final boolean configureSettingsFirst = randomBoolean(); + // it's valid to first configure remote cluster, then credentials + if (configureSettingsFirst) { + putRemoteClusterSettings("my_remote_cluster", fulfillingCluster, false, isProxyMode, randomBoolean()); + } 
+ + configureRemoteClusterCredentials("my_remote_cluster", remoteClusterCredentials, keystoreSettings); + + // also valid to configure credentials, then cluster + if (false == configureSettingsFirst) { + configureRemoteCluster("my_remote_cluster"); + } else { + // now that credentials are configured, we expect a successful connection + checkRemoteConnection("my_remote_cluster", fulfillingCluster, false, isProxyMode); + } + } + + private void assertSharedLogsSearchSuccess(String clusterAlias) throws IOException { + final Response response = performRequestWithRemoteSearchUser( + new Request( + "GET", + String.format(Locale.ROOT, "/%s:shared-logs/_search?ccs_minimize_roundtrips=%s", clusterAlias, randomBoolean()) + ) + ); + assertOK(response); + final SearchResponse searchResponse = SearchResponse.fromXContent(responseAsParser(response)); + try { + final List actualIndices = Arrays.stream(searchResponse.getHits().getHits()) + .map(SearchHit::getIndex) + .collect(Collectors.toList()); + assertThat(actualIndices, containsInAnyOrder("shared-logs")); + } finally { + searchResponse.decRef(); + } + } + + private void indexDocumentsOnFulfillingCluster() throws IOException { + final var indexDocRequest = new Request("POST", "/shared-logs/_doc/1?refresh=true"); + indexDocRequest.setJsonEntity("{\"field\": \"1\"}"); + assertOK(performRequestAgainstFulfillingCluster(indexDocRequest)); + } + + private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS)) + ); + return client().performRequest(request); + } + +} diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java index 39985212bc2fd..0376786225723 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTransformMigrationIT.java @@ -156,7 +156,7 @@ public void testRcs1Setup() throws Exception { // First migrate to RCS 2.0 @Order(30) - public void testQueryClusterRestartForRcs2() throws IOException { + public void testQueryClusterCredentialsChangeForRcs2() throws IOException { // Update the transform_user_role so that it is sufficient for both RCS 1.0 and 2.0 final Request putRoleRequest = new Request("POST", "/_security/role/" + TRANSFORM_USER_ROLE); putRoleRequest.setJsonEntity(""" @@ -198,9 +198,7 @@ public void testQueryClusterRestartForRcs2() throws IOException { } ] }"""); - keystoreSettings.put("cluster.remote.my_remote_cluster.credentials", (String) crossClusterAccessApiKey.get("encoded")); - queryCluster.restart(false); - closeClients(); + configureRemoteClusterCredentials("my_remote_cluster", (String) crossClusterAccessApiKey.get("encoded"), keystoreSettings); } @Order(40) @@ -222,7 +220,7 @@ public void testRcs2Setup() throws Exception { // Second migrate back to RCS 1.0 @Order(50) - public void testQueryClusterRestartAgainForRcs1() throws IOException { + public void testQueryClusterCredentialsChangeAgainForRcs1() throws IOException { stopTransform(); // Remove the RCS 2.0 remote cluster @@ -247,9 +245,7 @@ public void testQueryClusterRestartAgainForRcs1() throws IOException { indexSourceDocuments(new UserStars("a", 0)); // Remove remote cluster credentials to revert back to RCS 1.0 - keystoreSettings.remove("cluster.remote.my_remote_cluster.credentials"); - queryCluster.restart(false); - 
closeClients(); + removeRemoteClusterCredentials("my_remote_cluster", keystoreSettings); } @Order(60) diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index cab0c2bff28f0..b6893e853f256 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -134,8 +134,13 @@ public class Constants { "cluster:admin/xpack/connector/update_last_seen", "cluster:admin/xpack/connector/update_last_sync_stats", "cluster:admin/xpack/connector/update_name", + "cluster:admin/xpack/connector/update_native", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", + "cluster:admin/xpack/connector/update_service_type", + "cluster:admin/xpack/connector/secret/delete", + "cluster:admin/xpack/connector/secret/get", + "cluster:admin/xpack/connector/secret/post", "cluster:admin/xpack/connector/sync_job/cancel", "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/sync_job/delete", @@ -282,6 +287,7 @@ public class Constants { "cluster:admin/xpack/security/user/change_password", "cluster:admin/xpack/security/user/delete", "cluster:admin/xpack/security/user/get", + "cluster:admin/xpack/security/user/query", "cluster:admin/xpack/security/user/has_privileges", "cluster:admin/xpack/security/user/list_privileges", "cluster:admin/xpack/security/user/put", @@ -340,6 +346,7 @@ public class Constants { "cluster:monitor/task/get", "cluster:monitor/tasks/lists", "cluster:monitor/text_structure/findstructure", + "cluster:monitor/text_structure/test_grok_pattern", "cluster:monitor/transform/get", 
"cluster:monitor/transform/stats/get", "cluster:monitor/xpack/analytics/stats", diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java index 7842493abbc7a..87ef55b5b8633 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesTestPlugin.java @@ -9,6 +9,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -27,6 +28,7 @@ public class OperatorPrivilegesTestPlugin extends Plugin implements ActionPlugin @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/security/qa/profile/build.gradle b/x-pack/plugin/security/qa/profile/build.gradle index 17f53534cfc7a..ac821e670fde0 100644 --- a/x-pack/plugin/security/qa/profile/build.gradle +++ b/x-pack/plugin/security/qa/profile/build.gradle @@ -4,7 +4,6 @@ apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { javaRestTestImplementation project(':x-pack:plugin:core') - javaRestTestImplementation project(':client:rest-high-level') javaRestTestImplementation project(':x-pack:plugin:security') } @@ -13,4 +12,4 @@ boolean literalUsername = 
BuildParams.random.nextBoolean() tasks.named("javaRestTest").configure { usesDefaultDistribution() systemProperty 'test.literalUsername', literalUsername -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/qa/saml-rest-tests/build.gradle b/x-pack/plugin/security/qa/saml-rest-tests/build.gradle index 8953e7f0afbd4..9d4d5539b3ffc 100644 --- a/x-pack/plugin/security/qa/saml-rest-tests/build.gradle +++ b/x-pack/plugin/security/qa/saml-rest-tests/build.gradle @@ -2,7 +2,6 @@ apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { javaRestTestImplementation project(':x-pack:plugin:core') - javaRestTestImplementation project(':client:rest-high-level') javaRestTestImplementation project(':x-pack:plugin:security') clusterModules(project(":modules:analysis-common")) } diff --git a/x-pack/plugin/security/qa/security-basic/build.gradle b/x-pack/plugin/security/qa/security-basic/build.gradle index f1fa2dabfb1f6..7684d879671ab 100644 --- a/x-pack/plugin/security/qa/security-basic/build.gradle +++ b/x-pack/plugin/security/qa/security-basic/build.gradle @@ -6,7 +6,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('security')))) javaRestTestImplementation(testArtifact(project(xpackModule('core')))) - javaRestTestImplementation project(":client:rest-high-level") } tasks.named('javaRestTest') { diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java index f79077ae3a550..e552befc267c8 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java @@ -9,8 +9,10 @@ import org.apache.http.HttpHeaders; 
import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.XContentTestUtils; @@ -21,6 +23,7 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Base64; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; @@ -29,6 +32,7 @@ import java.util.stream.IntStream; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; @@ -43,6 +47,8 @@ public class QueryApiKeyIT extends SecurityInBasicRestTestCase { private static final String API_KEY_ADMIN_AUTH_HEADER = "Basic YXBpX2tleV9hZG1pbjpzZWN1cml0eS10ZXN0LXBhc3N3b3Jk"; private static final String API_KEY_USER_AUTH_HEADER = "Basic YXBpX2tleV91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; private static final String TEST_USER_AUTH_HEADER = "Basic c2VjdXJpdHlfdGVzdF91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; + private static final String SYSTEM_WRITE_ROLE_NAME = "system_write"; + private static final String SUPERUSER_WITH_SYSTEM_WRITE = "superuser_with_system_write"; public void testQuery() throws IOException { createApiKeys(); @@ -125,8 +131,9 @@ public void testQuery() throws IOException { }); // Search for fields outside of the allowlist fails - assertQueryError(API_KEY_ADMIN_AUTH_HEADER, 400, """ + ResponseException responseException = assertQueryError(API_KEY_ADMIN_AUTH_HEADER, 400, """ { "query": { "prefix": {"api_key_hash": "{PBKDF2}10000$"} } }"""); + assertThat(responseException.getMessage(), containsString("Field [api_key_hash] is not allowed for API Key query")); // Search for fields that are not 
allowed in Query DSL but used internally by the service itself final String fieldName = randomFrom("doc_type", "api_key_invalidated", "invalidation_time"); @@ -297,6 +304,71 @@ public void testPagination() throws IOException, InterruptedException { assertThat(responseMap2.get("count"), equalTo(0)); } + public void testTypeField() throws Exception { + final List allApiKeyIds = new ArrayList<>(7); + for (int i = 0; i < 7; i++) { + allApiKeyIds.add( + createApiKey("typed_key_" + i, Map.of(), randomFrom(API_KEY_ADMIN_AUTH_HEADER, API_KEY_USER_AUTH_HEADER)).v1() + ); + } + List apiKeyIdsSubset = randomSubsetOf(allApiKeyIds); + List apiKeyIdsSubsetDifference = new ArrayList<>(allApiKeyIds); + apiKeyIdsSubsetDifference.removeAll(apiKeyIdsSubset); + + List apiKeyRestTypeQueries = List.of(""" + {"query": {"term": {"type": "rest" }}}""", """ + {"query": {"bool": {"must_not": [{"term": {"type": "cross_cluster"}}, {"term": {"type": "other"}}]}}}""", """ + {"query": {"prefix": {"type": "re" }}}""", """ + {"query": {"wildcard": {"type": "r*t" }}}""", """ + {"query": {"range": {"type": {"gte": "raaa", "lte": "rzzz"}}}}"""); + + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(allApiKeyIds.toArray(new String[0])) + ); + }); + } + + createSystemWriteRole(SYSTEM_WRITE_ROLE_NAME); + String systemWriteCreds = createUser(SUPERUSER_WITH_SYSTEM_WRITE, new String[] { "superuser", SYSTEM_WRITE_ROLE_NAME }); + + // test keys with no "type" field are still considered of type "rest" + // this is so in order to accommodate pre-8.9 API keys which where all of type "rest" implicitly + updateApiKeys(systemWriteCreds, "ctx._source.remove('type');", apiKeyIdsSubset); + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + 
containsInAnyOrder(allApiKeyIds.toArray(new String[0])) + ); + }); + } + + // but the same keys with type "other" are NOT of type "rest" + updateApiKeys(systemWriteCreds, "ctx._source['type']='other';", apiKeyIdsSubset); + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIdsSubsetDifference.toArray(new String[0])) + ); + }); + } + // the complement set is not of type "rest" if it is "cross_cluster" + updateApiKeys(systemWriteCreds, "ctx._source['type']='rest';", apiKeyIdsSubset); + updateApiKeys(systemWriteCreds, "ctx._source['type']='cross_cluster';", apiKeyIdsSubsetDifference); + for (String query : apiKeyRestTypeQueries) { + assertQuery(API_KEY_ADMIN_AUTH_HEADER, query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIdsSubset.toArray(new String[0])) + ); + }); + } + } + @SuppressWarnings("unchecked") public void testSort() throws IOException { final String authHeader = randomFrom(API_KEY_ADMIN_AUTH_HEADER, API_KEY_USER_AUTH_HEADER); @@ -359,6 +431,130 @@ public void testSort() throws IOException { assertQueryError(authHeader, 400, "{\"sort\":[\"" + invalidFieldName + "\"]}"); } + public void testSimpleQueryStringQuery() throws IOException { + String batmanUserCredentials = createUser("batman", new String[] { "api_key_user_role" }); + final List apiKeyIds = new ArrayList<>(); + apiKeyIds.add(createApiKey("key1-user", null, null, Map.of("label", "prod"), API_KEY_USER_AUTH_HEADER).v1()); + apiKeyIds.add(createApiKey("key1-admin", null, null, Map.of("label", "prod"), API_KEY_ADMIN_AUTH_HEADER).v1()); + apiKeyIds.add(createApiKey("key2-user", null, null, Map.of("value", 42, "label", "prod"), API_KEY_USER_AUTH_HEADER).v1()); + apiKeyIds.add(createApiKey("key2-admin", null, null, Map.of("value", 42, "label", "prod"), 
API_KEY_ADMIN_AUTH_HEADER).v1()); + apiKeyIds.add(createApiKey("key3-user", null, null, Map.of("value", 42, "hero", true), API_KEY_USER_AUTH_HEADER).v1()); + apiKeyIds.add(createApiKey("key3-admin", null, null, Map.of("value", 42, "hero", true), API_KEY_ADMIN_AUTH_HEADER).v1()); + apiKeyIds.add(createApiKey("key4-batman", null, null, Map.of("hero", true), batmanUserCredentials).v1()); + apiKeyIds.add(createApiKey("key5-batman", null, null, Map.of("hero", true), batmanUserCredentials).v1()); + + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "key*", "fields": ["no_such_field_pattern*"]}}}""", + apiKeys -> assertThat(apiKeys, is(empty())) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "prod 42 true", "fields": ["metadata.*"]}}}""", + apiKeys -> assertThat(apiKeys, is(empty())) + ); + // disallowed fields are silently ignored for the simple query string query type + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "ke*", "fields": ["x*", "api_key_hash"]}}}""", + apiKeys -> assertThat(apiKeys, is(empty())) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "prod 42 true", "fields": ["wild*", "metadata"]}}}""", + apiKeys -> assertThat(apiKeys.stream().map(k -> (String) k.get("id")).toList(), containsInAnyOrder(apiKeyIds.toArray())) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "key* +rest" }}}""", + apiKeys -> assertThat(apiKeys.stream().map(k -> (String) k.get("id")).toList(), containsInAnyOrder(apiKeyIds.toArray())) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "-prod", "fields": ["metadata"]}}}""", + apiKeys -> assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIds.get(4), apiKeyIds.get(5), apiKeyIds.get(6), 
apiKeyIds.get(7)) + ) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "-42", "fields": ["meta*", "whatever*"]}}}""", + apiKeys -> assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIds.get(0), apiKeyIds.get(1), apiKeyIds.get(6), apiKeyIds.get(7)) + ) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "-rest term_which_does_not_exist"}}}""", + apiKeys -> assertThat(apiKeys, is(empty())) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "+default_file +api_key_user", "fields": ["us*", "rea*"]}}}""", + apiKeys -> assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIds.get(0), apiKeyIds.get(2), apiKeyIds.get(4)) + ) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "default_fie~4", "fields": ["*"]}}}""", + apiKeys -> assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder( + apiKeyIds.get(0), + apiKeyIds.get(1), + apiKeyIds.get(2), + apiKeyIds.get(3), + apiKeyIds.get(4), + apiKeyIds.get(5) + ) + ) + ); + assertQuery( + API_KEY_ADMIN_AUTH_HEADER, + """ + {"query": {"simple_query_string": {"query": "+prod +42", + "fields": ["metadata.label", "metadata.value", "metadata.hero"]}}}""", + apiKeys -> assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIds.get(2), apiKeyIds.get(3)) + ) + ); + assertQuery(batmanUserCredentials, """ + {"query": {"simple_query_string": {"query": "+prod key*", "fields": ["name", "username", "metadata"], + "default_operator": "AND"}}}""", apiKeys -> assertThat(apiKeys, is(empty()))); + assertQuery( + batmanUserCredentials, + """ + {"query": {"simple_query_string": {"query": "+true +key*", "fields": ["name", "username", "metadata"], + "default_operator": "AND"}}}""", + 
apiKeys -> assertThat( + apiKeys.stream().map(k -> (String) k.get("id")).toList(), + containsInAnyOrder(apiKeyIds.get(6), apiKeyIds.get(7)) + ) + ); + assertQuery( + batmanUserCredentials, + """ + {"query": {"bool": {"must": [{"term": {"name": {"value":"key5-batman"}}}, + {"simple_query_string": {"query": "default_native"}}]}}}""", + apiKeys -> assertThat(apiKeys.stream().map(k -> (String) k.get("id")).toList(), containsInAnyOrder(apiKeyIds.get(7))) + ); + } + public void testExistsQuery() throws IOException, InterruptedException { final String authHeader = randomFrom(API_KEY_ADMIN_AUTH_HEADER, API_KEY_USER_AUTH_HEADER); @@ -460,12 +656,13 @@ private int collectApiKeys(List> apiKeyInfos, Request reques return actualSize; } - private void assertQueryError(String authHeader, int statusCode, String body) throws IOException { + private ResponseException assertQueryError(String authHeader, int statusCode, String body) throws IOException { final Request request = new Request("GET", "/_security/_query/api_key"); request.setJsonEntity(body); request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, authHeader)); final ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(statusCode)); + return responseException; } private void assertQuery(String authHeader, String body, Consumer>> apiKeysVerifier) throws IOException { @@ -598,10 +795,73 @@ private String createAndInvalidateApiKey(String name, String authHeader) throws return tuple.v1(); } - private void createUser(String name) throws IOException { - final Request request = new Request("POST", "/_security/user/" + name); - request.setJsonEntity(""" - {"password":"super-strong-password","roles":[]}"""); - assertOK(adminClient().performRequest(request)); + private String createUser(String username) throws IOException { + return createUser(username, 
new String[0]); + } + + private String createUser(String username, String[] roles) throws IOException { + final Request request = new Request("POST", "/_security/user/" + username); + Map body = Map.ofEntries(Map.entry("roles", roles), Map.entry("password", "super-strong-password".toString())); + request.setJsonEntity(XContentTestUtils.convertToXContent(body, XContentType.JSON).utf8ToString()); + Response response = adminClient().performRequest(request); + assertOK(response); + return basicAuthHeaderValue(username, new SecureString("super-strong-password".toCharArray())); + } + + private void createSystemWriteRole(String roleName) throws IOException { + final Request addRole = new Request("POST", "/_security/role/" + roleName); + addRole.setJsonEntity(""" + { + "indices": [ + { + "names": [ "*" ], + "privileges": ["all"], + "allow_restricted_indices" : true + } + ] + }"""); + Response response = adminClient().performRequest(addRole); + assertOK(response); + } + + private void expectWarnings(Request request, String... expectedWarnings) { + final Set expected = Set.of(expectedWarnings); + RequestOptions options = request.getOptions().toBuilder().setWarningsHandler(warnings -> { + final Set actual = Set.copyOf(warnings); + // Return true if the warnings aren't what we expected; the client will treat them as a fatal error. 
+ return actual.equals(expected) == false; + }).build(); + request.setOptions(options); + } + + private void updateApiKeys(String creds, String script, Collection ids) throws IOException { + if (ids.isEmpty()) { + return; + } + final Request request = new Request("POST", "/.security/_update_by_query?refresh=true&wait_for_completion=true"); + request.setJsonEntity(Strings.format(""" + { + "script": { + "source": "%s", + "lang": "painless" + }, + "query": { + "bool": { + "must": [ + {"term": {"doc_type": "api_key"}}, + {"ids": {"values": %s}} + ] + } + } + } + """, script, ids.stream().map(id -> "\"" + id + "\"").collect(Collectors.toList()))); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, creds)); + expectWarnings( + request, + "this request accesses system indices: [.security-7]," + + " but in a future major version, direct access to system indices will be prevented by default" + ); + Response response = client().performRequest(request); + assertOK(response); } } diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java new file mode 100644 index 0000000000000..1bd3e9ed927fe --- /dev/null +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java @@ -0,0 +1,487 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.user.User; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Consumer; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.core.IsEqual.equalTo; + +public class QueryUserIT extends SecurityInBasicRestTestCase { + + private static final String READ_USERS_USER_AUTH_HEADER = "Basic cmVhZF91c2Vyc191c2VyOnJlYWQtdXNlcnMtcGFzc3dvcmQ="; + private static final String TEST_USER_NO_READ_USERS_AUTH_HEADER = "Basic c2VjdXJpdHlfdGVzdF91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; + + private static final Set reservedUsers = Set.of( + "elastic", + "kibana", + "kibana_system", + "logstash_system", + "beats_system", + "apm_system", + "remote_monitoring_user" + ); + + private Request queryUserRequestWithAuth() { + final Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/user"); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_USERS_USER_AUTH_HEADER)); + return request; + } + + public void testQuery() throws IOException { + // No users to match yet + assertQuery("", users -> assertThat(users, empty())); + + int randomUserCount = createRandomUsers().size(); + + // An empty request body means search for all users (page size = 10) + assertQuery("", users -> 
assertThat(users.size(), equalTo(Math.min(randomUserCount, 10)))); + + // Match all + assertQuery( + String.format(""" + {"query":{"match_all":{}},"from":0,"size":%s}""", randomUserCount), + users -> assertThat(users.size(), equalTo(randomUserCount)) + ); + + // Exists query + String field = randomFrom("username", "full_name", "roles", "enabled"); + assertQuery( + String.format(""" + {"query":{"exists":{"field":"%s"}},"from":0,"size":%s}""", field, randomUserCount), + users -> assertEquals(users.size(), randomUserCount) + ); + + // Prefix search + User prefixUser1 = createUser( + "mr-prefix1", + new String[] { "master-of-the-universe", "some-other-role" }, + "Prefix1", + "email@something.com", + Map.of(), + true + ); + User prefixUser2 = createUser( + "mr-prefix2", + new String[] { "master-of-the-world", "some-other-role" }, + "Prefix2", + "email@something.com", + Map.of(), + true + ); + assertQuery(""" + {"query":{"bool":{"must":[{"prefix":{"roles":"master-of-the"}}]}},"sort":["username"]}""", returnedUsers -> { + assertThat(returnedUsers, hasSize(2)); + assertUser(prefixUser1, returnedUsers.get(0)); + assertUser(prefixUser2, returnedUsers.get(1)); + }); + + // Wildcard search + assertQuery(""" + { "query": { "wildcard": {"username": "mr-prefix*"} },"sort":["username"]}""", users -> { + assertThat(users.size(), equalTo(2)); + assertUser(prefixUser1, users.get(0)); + assertUser(prefixUser2, users.get(1)); + }); + + // Terms query + assertQuery(""" + {"query":{"terms":{"roles":["some-other-role"]}},"sort":["username"]}""", users -> { + assertThat(users.size(), equalTo(2)); + assertUser(prefixUser1, users.get(0)); + assertUser(prefixUser2, users.get(1)); + }); + + // Test other fields + User otherFieldsTestUser = createUser( + "batman-official-user", + new String[] { "bat-cave-admin" }, + "Batman", + "batman@hotmail.com", + Map.of(), + true + ); + String enabledTerm = "\"enabled\":true"; + String fullNameTerm = "\"full_name\":\"batman\""; + String emailTerm = 
"\"email\":\"batman@hotmail.com\""; + + final String term = randomFrom(enabledTerm, fullNameTerm, emailTerm); + assertQuery( + Strings.format(""" + {"query":{"term":{%s}},"size":100}""", term), + users -> assertThat( + users.stream().map(u -> u.get(User.Fields.USERNAME.getPreferredName()).toString()).toList(), + hasItem("batman-official-user") + ) + ); + + // Test complex query + assertQuery(""" + { "query": {"bool": {"must": [ + {"wildcard": {"username": "batman-official*"}}, + {"term": {"enabled": true}}],"filter": [{"prefix": {"roles": "bat-cave"}}]}}}""", users -> { + assertThat(users.size(), equalTo(1)); + assertUser(otherFieldsTestUser, users.get(0)); + }); + + // Search for fields outside the allowlist fails + assertQueryError(400, """ + { "query": { "prefix": {"not_allowed": "ABC"} } }"""); + + // Search for fields that are not allowed in Query DSL but used internally by the service itself + final String fieldName = randomFrom("type", "password"); + assertQueryError(400, Strings.format(""" + { "query": { "term": {"%s": "%s"} } }""", fieldName, randomAlphaOfLengthBetween(3, 8))); + + // User without read_security gets 403 trying to search Users + assertQueryError(TEST_USER_NO_READ_USERS_AUTH_HEADER, 403, """ + { "query": { "wildcard": {"name": "*prefix*"} } }"""); + + // Range query not supported + assertQueryError(400, """ + {"query":{"range":{"username":{"lt":"now"}}}}"""); + + // IDs query not supported + assertQueryError(400, """ + { "query": { "ids": { "values": "abc" } } }"""); + + // Make sure we can't query reserved users + String reservedUsername = getReservedUsernameAndAssertExists(); + assertQuery(String.format(""" + {"query":{"term":{"username":"%s"}}}""", reservedUsername), users -> assertTrue(users.isEmpty())); + } + + public void testPagination() throws IOException { + final List users = createRandomUsers(); + + final int from = randomIntBetween(0, 3); + final int size = randomIntBetween(2, 5); + final int remaining = users.size() - from; + + 
// Using string only sorting to simplify test + final String sortField = "username"; + final List> allUserInfos = new ArrayList<>(remaining); + { + Request request = queryUserRequestWithAuth(); + request.setJsonEntity("{\"from\":" + from + ",\"size\":" + size + ",\"sort\":[\"" + sortField + "\"]}"); + allUserInfos.addAll(collectUsers(request, users.size())); + } + // first batch should be a full page + assertThat(allUserInfos.size(), equalTo(size)); + + while (allUserInfos.size() < remaining) { + final Request request = queryUserRequestWithAuth(); + final List sortValues = extractSortValues(allUserInfos.get(allUserInfos.size() - 1)); + + request.setJsonEntity(Strings.format(""" + {"size":%s,"sort":["%s"],"search_after":["%s"]} + """, size, sortField, sortValues.get(0))); + final List> userInfoPage = collectUsers(request, users.size()); + + if (userInfoPage.isEmpty() && allUserInfos.size() < remaining) { + fail("fail to retrieve all Users, expect [" + remaining + "], got [" + allUserInfos + "]"); + } + allUserInfos.addAll(userInfoPage); + + // Before all users are retrieved, each page should be a full page + if (allUserInfos.size() < remaining) { + assertThat(userInfoPage.size(), equalTo(size)); + } + } + + // Assert sort values match the field of User information + assertThat( + allUserInfos.stream().map(m -> m.get(sortField)).toList(), + equalTo(allUserInfos.stream().map(m -> extractSortValues(m).get(0)).toList()) + ); + + // Assert that all users match the created users and that they're sorted correctly + assertUsers(users, allUserInfos, sortField, from); + + // size can be zero, but total should still reflect the number of users matched + final Request request = queryUserRequestWithAuth(); + request.setJsonEntity("{\"size\":0}"); + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + assertThat(responseMap.get("total"), equalTo(users.size())); + assertThat(responseMap.get("count"), 
equalTo(0)); + } + + @SuppressWarnings("unchecked") + public void testSort() throws IOException { + final List testUsers = List.of( + createUser("a", new String[] { "4", "5", "6" }), + createUser("b", new String[] { "5", "6" }), + createUser("c", new String[] { "7", "8" }) + ); + assertQuery(""" + {"sort":[{"username":{"order":"desc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 2, j = 0; i >= 0; i--, j++) { + assertUser(testUsers.get(j), users.get(i)); + assertThat(users.get(i).get("username"), equalTo(((List) users.get(i).get("_sort")).get(0))); + } + }); + + assertQuery(""" + {"sort":[{"username":{"order":"asc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 0; i <= 2; i++) { + assertUser(testUsers.get(i), users.get(i)); + assertThat(users.get(i).get("username"), equalTo(((List) users.get(i).get("_sort")).get(0))); + } + }); + + assertQuery(""" + {"sort":[{"roles":{"order":"asc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 0; i <= 2; i++) { + assertUser(testUsers.get(i), users.get(i)); + // Only first element of array is used for sorting + assertThat(((List) users.get(i).get("roles")).get(0), equalTo(((List) users.get(i).get("_sort")).get(0))); + } + }); + + // Make sure sorting on _doc works + assertQuery(""" + {"sort":["_doc"]}""", users -> assertThat(users.size(), equalTo(3))); + + // Make sure multi-field sorting works + assertQuery(""" + {"sort":[{"username":{"order":"asc"}}, {"roles":{"order":"asc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 0; i <= 2; i++) { + assertUser(testUsers.get(i), users.get(i)); + assertThat(users.get(i).get("username"), equalTo(((List) users.get(i).get("_sort")).get(0))); + assertThat(((List) users.get(i).get("roles")).get(0), equalTo(((List) users.get(i).get("_sort")).get(1))); + } + }); + + final String invalidFieldName = randomFrom("doc_type", "invalid", "password"); + assertQueryError(400, "{\"sort\":[\"" + 
invalidFieldName + "\"]}"); + + final String invalidSortName = randomFrom("email", "full_name"); + assertQueryError( + READ_USERS_USER_AUTH_HEADER, + 400, + String.format("{\"sort\":[\"%s\"]}", invalidSortName), + String.format("sorting is not supported for field [%s] in User query", invalidSortName) + ); + } + + private String getReservedUsernameAndAssertExists() throws IOException { + String username = randomFrom(reservedUsers); + final Request request = new Request("GET", "/_security/user"); + + if (randomBoolean()) { + // Update the user to create it in the security index + Request putUserRequest = new Request("PUT", "/_security/user/" + username); + putUserRequest.setJsonEntity("{\"enabled\": true}"); + } + + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_USERS_USER_AUTH_HEADER)); + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + assertNotNull(responseMap.get(username)); + return username; + } + + @SuppressWarnings("unchecked") + private List extractSortValues(Map userInfo) { + return (List) userInfo.get("_sort"); + } + + private List> collectUsers(Request request, int total) throws IOException { + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + @SuppressWarnings("unchecked") + final List> userInfos = (List>) responseMap.get("users"); + assertThat(responseMap.get("total"), equalTo(total)); + assertThat(responseMap.get("count"), equalTo(userInfos.size())); + return userInfos; + } + + private void assertQueryError(int statusCode, String body) { + assertQueryError(READ_USERS_USER_AUTH_HEADER, statusCode, body); + } + + private void assertQueryError(String authHeader, int statusCode, String body) { + assertQueryError(authHeader, statusCode, body, null); + } + + private void assertQueryError(String authHeader, int statusCode, String body, String 
errorMessage) { + final Request request = new Request(randomFrom("GET", "POST"), "/_security/_query/user"); + request.setJsonEntity(body); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, authHeader)); + final ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(statusCode)); + if (errorMessage != null) { + assertTrue(responseException.getMessage().contains(errorMessage)); + } + } + + private void assertQuery(String body, Consumer>> userVerifier) throws IOException { + final Request request = queryUserRequestWithAuth(); + request.setJsonEntity(body); + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + @SuppressWarnings("unchecked") + final List> users = (List>) responseMap.get("users"); + userVerifier.accept(users); + } + + private void assertUser(User expectedUser, Map actualUser) { + assertUser(userToMap(expectedUser), actualUser); + } + + @SuppressWarnings("unchecked") + private void assertUser(Map expectedUser, Map actualUser) { + assertEquals(expectedUser.get(User.Fields.USERNAME.getPreferredName()), actualUser.get(User.Fields.USERNAME.getPreferredName())); + assertArrayEquals( + ((List) expectedUser.get(User.Fields.ROLES.getPreferredName())).toArray(), + ((List) actualUser.get(User.Fields.ROLES.getPreferredName())).toArray() + ); + assertEquals(expectedUser.get(User.Fields.FULL_NAME.getPreferredName()), actualUser.get(User.Fields.FULL_NAME.getPreferredName())); + assertEquals(expectedUser.get(User.Fields.EMAIL.getPreferredName()), actualUser.get(User.Fields.EMAIL.getPreferredName())); + assertEquals(expectedUser.get(User.Fields.METADATA.getPreferredName()), actualUser.get(User.Fields.METADATA.getPreferredName())); + assertEquals(expectedUser.get(User.Fields.ENABLED.getPreferredName()), 
actualUser.get(User.Fields.ENABLED.getPreferredName())); + } + + private Map userToMap(User user) { + return Map.of( + User.Fields.USERNAME.getPreferredName(), + user.principal(), + User.Fields.ROLES.getPreferredName(), + Arrays.stream(user.roles()).toList(), + User.Fields.FULL_NAME.getPreferredName(), + user.fullName(), + User.Fields.EMAIL.getPreferredName(), + user.email(), + User.Fields.METADATA.getPreferredName(), + user.metadata(), + User.Fields.ENABLED.getPreferredName(), + user.enabled() + ); + } + + private void assertUsers(List expectedUsers, List> actualUsers, String sortField, int from) { + assertEquals(expectedUsers.size() - from, actualUsers.size()); + + List> sortedExpectedUsers = expectedUsers.stream() + .map(this::userToMap) + .sorted(Comparator.comparing(user -> user.get(sortField).toString())) + .toList(); + + for (int i = from; i < sortedExpectedUsers.size(); i++) { + assertUser(sortedExpectedUsers.get(i), actualUsers.get(i - from)); + } + } + + public static Map randomUserMetadata() { + return ESTestCase.randomFrom( + Map.of( + "employee_id", + ESTestCase.randomAlphaOfLength(5), + "number", + 1, + "numbers", + List.of(1, 3, 5), + "extra", + Map.of("favorite pizza", "margherita", "age", 42) + ), + Map.of(ESTestCase.randomAlphaOfLengthBetween(3, 8), ESTestCase.randomAlphaOfLengthBetween(3, 8)), + Map.of(), + null + ); + } + + private List createRandomUsers() throws IOException { + int randomUserCount = randomIntBetween(8, 15); + final List users = new ArrayList<>(randomUserCount); + + for (int i = 0; i < randomUserCount; i++) { + users.add( + createUser( + randomValueOtherThanMany(reservedUsers::contains, () -> randomAlphaOfLengthBetween(3, 8)) + "-" + i, + randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8)), + randomAlphaOfLengthBetween(3, 8), + randomAlphaOfLengthBetween(3, 8), + randomUserMetadata(), + randomBoolean() + ) + ); + } + + return users; + } + + private User createUser(String userName, String[] roles) throws 
IOException { + return createUser( + userName, + roles, + randomAlphaOfLengthBetween(3, 8), + randomAlphaOfLengthBetween(3, 8), + randomUserMetadata(), + randomBoolean() + ); + } + + private User createUser(String userName, String[] roles, String fullName, String email, Map metadata, boolean enabled) + throws IOException { + + final Request request = new Request("POST", "/_security/user/" + userName); + BytesReference source = BytesReference.bytes( + jsonBuilder().map( + Map.of( + User.Fields.USERNAME.getPreferredName(), + userName, + User.Fields.ROLES.getPreferredName(), + roles, + User.Fields.FULL_NAME.getPreferredName(), + fullName, + User.Fields.EMAIL.getPreferredName(), + email, + User.Fields.METADATA.getPreferredName(), + metadata == null ? Map.of() : metadata, + User.Fields.PASSWORD.getPreferredName(), + "100%-security-guaranteed", + User.Fields.ENABLED.getPreferredName(), + enabled + ) + ) + ); + request.setJsonEntity(source.utf8ToString()); + Response response = adminClient().performRequest(request); + assertOK(response); + assertTrue((boolean) responseAsMap(response).get("created")); + return new User(userName, roles, fullName, email, metadata, enabled); + } +} diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java index 5843350e36457..587cc4643514c 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java @@ -22,6 +22,9 @@ public abstract class SecurityInBasicRestTestCase extends ESRestTestCase { protected static final String REST_USER = "security_test_user"; private static final SecureString REST_PASSWORD = new 
SecureString("security-test-password".toCharArray()); + protected static final String READ_USERS_USER = "read_users_user"; + private static final SecureString READ_USERS_PASSWORD = new SecureString("read-users-password".toCharArray()); + private static final String ADMIN_USER = "admin_user"; private static final SecureString ADMIN_PASSWORD = new SecureString("admin-password".toCharArray()); @@ -47,6 +50,7 @@ public abstract class SecurityInBasicRestTestCase extends ESRestTestCase { .user(REST_USER, REST_PASSWORD.toString(), "security_test_role", false) .user(API_KEY_USER, API_KEY_USER_PASSWORD.toString(), "api_key_user_role", false) .user(API_KEY_ADMIN_USER, API_KEY_ADMIN_USER_PASSWORD.toString(), "api_key_admin_role", false) + .user(READ_USERS_USER, READ_USERS_PASSWORD.toString(), "read_users_user_role", false) .build(); @Override diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml index 47f1c05ffaaf8..15c291274bcdb 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml @@ -18,6 +18,11 @@ api_key_user_role: cluster: - manage_own_api_key +# Used to perform query user operations +read_users_user_role: + cluster: + - read_security + # Role with remote indices privileges role_remote_indices: remote_indices: diff --git a/x-pack/plugin/security/qa/security-trial/build.gradle b/x-pack/plugin/security/qa/security-trial/build.gradle index 991e1623f2f35..f6b7185c08b33 100644 --- a/x-pack/plugin/security/qa/security-trial/build.gradle +++ b/x-pack/plugin/security/qa/security-trial/build.gradle @@ -6,7 +6,6 @@ dependencies { javaRestTestImplementation project(path: xpackModule('core')) javaRestTestImplementation(testArtifact(project(xpackModule('security')))) javaRestTestImplementation(testArtifact(project(xpackModule('core')))) - 
javaRestTestImplementation project(":client:rest-high-level") } tasks.named('javaRestTest') { diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 6c4aaeada74c7..3833a6466c67c 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -35,8 +35,10 @@ import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -54,9 +56,11 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -703,6 +707,77 @@ public void testRemoteIndicesSupportForApiKeys() throws IOException { } + @SuppressWarnings("unchecked") + public void testQueryCrossClusterApiKeysByType() throws IOException { + final List apiKeyIds = new ArrayList<>(3); + for (int i = 0; i < randomIntBetween(3, 5); i++) { + Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); + createRequest.setJsonEntity(Strings.format(""" + { + "name": "test-cross-key-query-%d", + "access": { + "search": [ + { + "names": [ 
"whatever" ] + } + ] + }, + "metadata": { "tag": %d, "label": "rest" } + }""", i, i)); + setUserForRequest(createRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + ObjectPath createResponse = assertOKAndCreateObjectPath(client().performRequest(createRequest)); + apiKeyIds.add(createResponse.evaluate("id")); + } + // the "cross_cluster" keys are not "rest" type + for (String restTypeQuery : List.of(""" + {"query": {"term": {"type": "rest" }}}""", """ + {"query": {"bool": {"must_not": {"term": {"type": "cross_cluster"}}}}}""", """ + {"query": {"simple_query_string": {"query": "re* rest -cross_cluster", "fields": ["ty*"]}}}""", """ + {"query": {"simple_query_string": {"query": "-cross*", "fields": ["type"]}}}""", """ + {"query": {"prefix": {"type": "re" }}}""", """ + {"query": {"wildcard": {"type": "r*t" }}}""", """ + {"query": {"range": {"type": {"gte": "raaa", "lte": "rzzz"}}}}""")) { + Request queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(restTypeQuery); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + ObjectPath queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(0)); + assertThat(queryResponse.evaluate("count"), is(0)); + assertThat(queryResponse.evaluate("api_keys"), iterableWithSize(0)); + } + for (String crossClusterTypeQuery : List.of(""" + {"query": {"term": {"type": "cross_cluster" }}}""", """ + {"query": {"bool": {"must_not": {"term": {"type": "rest"}}}}}""", """ + {"query": {"simple_query_string": {"query": "cro* cross_cluster -re*", "fields": ["ty*"]}}}""", """ + {"query": {"simple_query_string": {"query": "-re*", "fields": ["type"]}}}""", """ + {"query": {"prefix": {"type": "cro" }}}""", """ + {"query": {"wildcard": {"type": "*oss_*er" }}}""", """ + {"query": {"range": {"type": {"gte": "cross", "lte": "zzzz"}}}}""")) { 
+ Request queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(crossClusterTypeQuery); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + ObjectPath queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(apiKeyIds.size())); + assertThat(queryResponse.evaluate("count"), is(apiKeyIds.size())); + assertThat(queryResponse.evaluate("api_keys"), iterableWithSize(apiKeyIds.size())); + Iterator apiKeys = ((List) queryResponse.evaluate("api_keys")).iterator(); + while (apiKeys.hasNext()) { + assertThat(apiKeyIds, hasItem((String) ((Map) apiKeys.next()).get("id"))); + } + } + final Request queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(""" + {"query": {"bool": {"must": [{"term": {"type": "cross_cluster" }}, {"term": {"metadata.tag": 2}}]}}}"""); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + final ObjectPath queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(1)); + assertThat(queryResponse.evaluate("count"), is(1)); + assertThat(queryResponse.evaluate("api_keys.0.name"), is("test-cross-key-query-2")); + } + public void testCreateCrossClusterApiKey() throws IOException { final Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); createRequest.setJsonEntity(""" diff --git a/x-pack/plugin/security/qa/service-account/build.gradle b/x-pack/plugin/security/qa/service-account/build.gradle index 67490a72c841d..e040fae57bdfc 100644 --- a/x-pack/plugin/security/qa/service-account/build.gradle +++ b/x-pack/plugin/security/qa/service-account/build.gradle @@ -3,7 +3,6 @@ apply plugin: 
'elasticsearch.internal-test-artifact' dependencies { javaRestTestImplementation project(':x-pack:plugin:core') - javaRestTestImplementation project(':client:rest-high-level') javaRestTestImplementation project(':x-pack:plugin:security') clusterModules(project(":modules:analysis-common")) clusterModules(project(":modules:rest-root")) diff --git a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java index f66631a57b4bb..e790866cf3d77 100644 --- a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java +++ b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java @@ -280,7 +280,9 @@ public class ServiceAccountIT extends ESRestTestCase { { "cluster": [ "manage", - "manage_security" + "manage_security", + "read_connector_secrets", + "write_connector_secrets" ], "indices": [ { diff --git a/x-pack/plugin/security/qa/smoke-test-all-realms/build.gradle b/x-pack/plugin/security/qa/smoke-test-all-realms/build.gradle index e044c45a2f1ab..282630443fe1a 100644 --- a/x-pack/plugin/security/qa/smoke-test-all-realms/build.gradle +++ b/x-pack/plugin/security/qa/smoke-test-all-realms/build.gradle @@ -11,7 +11,6 @@ apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('security')))) javaRestTestImplementation(testArtifact(project(xpackModule('core')))) - javaRestTestImplementation(project(":client:rest-high-level")) } tasks.named("javaRestTest").configure { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index 1e1d8a7f0654c..3fbcd00690e82 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -9,9 +9,8 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.SecureString; @@ -377,14 +376,14 @@ private void prepareIndices() { assertCacheState(DLS_TEMPLATE_ROLE_QUERY_INDEX, 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge( + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge( DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX ).setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); - final RefreshResponse refreshResponse = indicesAdmin().prepareRefresh(DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX) + final BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh(DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX) .get(); assertThat(refreshResponse.getFailedShards(), equalTo(0)); ensureGreen(DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX); diff --git 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java deleted file mode 100644 index 7d91f8994c20a..0000000000000 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java +++ /dev/null @@ -1,317 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.apache.lucene.search.TotalHits; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequest; -import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; -import org.elasticsearch.action.admin.cluster.node.reload.TransportNodesReloadSecureSettingsAction; -import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchShardsRequest; -import org.elasticsearch.action.search.SearchShardsResponse; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.search.TransportSearchAction; -import 
org.elasticsearch.action.search.TransportSearchShardsAction; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.VersionInformation; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.KeyStoreWrapper; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.env.Environment; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.test.SecuritySingleNodeTestCase; -import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.RemoteClusterCredentialsManager; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.security.authc.ApiKeyService; -import org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders; -import org.junit.BeforeClass; - -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class ReloadRemoteClusterCredentialsIT extends 
SecuritySingleNodeTestCase { - private static final String CLUSTER_ALIAS = "my_remote_cluster"; - - @BeforeClass - public static void disableInFips() { - assumeFalse( - "Cannot run in FIPS mode since the keystore will be password protected and sending a password in the reload" - + "settings api call, require TLS to be configured for the transport layer", - inFipsJvm() - ); - } - - @Override - public String configRoles() { - return org.elasticsearch.core.Strings.format(""" - user: - cluster: [ "ALL" ] - indices: - - names: '*' - privileges: [ "ALL" ] - remote_indices: - - names: '*' - privileges: [ "ALL" ] - clusters: ["*"] - """); - } - - @Override - public void tearDown() throws Exception { - try { - clearRemoteCluster(); - super.tearDown(); - } finally { - ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); - } - } - - private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); - - public void testReloadRemoteClusterCredentials() throws Exception { - final String credentials = randomAlphaOfLength(42); - writeCredentialsToKeyStore(credentials); - final RemoteClusterCredentialsManager clusterCredentialsManager = getInstanceFromNode(TransportService.class) - .getRemoteClusterService() - .getRemoteClusterCredentialsManager(); - // Until we reload, credentials written to keystore are not loaded into the credentials manager - assertThat(clusterCredentialsManager.hasCredentials(CLUSTER_ALIAS), is(false)); - reloadSecureSettings(); - assertThat(clusterCredentialsManager.resolveCredentials(CLUSTER_ALIAS), equalTo(credentials)); - - // Check that credentials get used for a remote connection, once we configure it - final BlockingQueue> capturedHeaders = ConcurrentCollections.newBlockingQueue(); - try (MockTransportService remoteTransport = startTransport("remoteNodeA", threadPool, capturedHeaders)) { - final TransportAddress remoteAddress = remoteTransport.getOriginalTransport() - .profileBoundAddresses() - .get("_remote_cluster") - 
.publishAddress(); - - configureRemoteCluster(remoteAddress); - - // Run search to trigger header capturing on the receiving side - client().search(new SearchRequest(CLUSTER_ALIAS + ":index-a")).get().decRef(); - - assertHeadersContainCredentialsThenClear(credentials, capturedHeaders); - - // Update credentials and ensure they are used - final String updatedCredentials = randomAlphaOfLength(41); - writeCredentialsToKeyStore(updatedCredentials); - reloadSecureSettings(); - - client().search(new SearchRequest(CLUSTER_ALIAS + ":index-a")).get().decRef(); - - assertHeadersContainCredentialsThenClear(updatedCredentials, capturedHeaders); - } - } - - private void assertHeadersContainCredentialsThenClear(String credentials, BlockingQueue> capturedHeaders) { - assertThat(capturedHeaders, is(not(empty()))); - for (Map actualHeaders : capturedHeaders) { - assertThat(actualHeaders, hasKey(CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY)); - assertThat( - actualHeaders.get(CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), - equalTo(ApiKeyService.withApiKeyPrefix(credentials)) - ); - } - capturedHeaders.clear(); - assertThat(capturedHeaders, is(empty())); - } - - private void clearRemoteCluster() throws InterruptedException, ExecutionException { - final var builder = Settings.builder() - .putNull("cluster.remote." + CLUSTER_ALIAS + ".mode") - .putNull("cluster.remote." + CLUSTER_ALIAS + ".seeds") - .putNull("cluster.remote." 
+ CLUSTER_ALIAS + ".proxy_address"); - clusterAdmin().updateSettings(new ClusterUpdateSettingsRequest().persistentSettings(builder)).get(); - } - - @Override - protected Settings nodeSettings() { - return Settings.builder().put(super.nodeSettings()).put("xpack.security.remote_cluster_client.ssl.enabled", false).build(); - } - - private void configureRemoteCluster(TransportAddress remoteAddress) throws InterruptedException, ExecutionException { - final Settings.Builder builder = Settings.builder(); - if (randomBoolean()) { - builder.put("cluster.remote." + CLUSTER_ALIAS + ".mode", "sniff") - .put("cluster.remote." + CLUSTER_ALIAS + ".seeds", remoteAddress.toString()) - .putNull("cluster.remote." + CLUSTER_ALIAS + ".proxy_address"); - } else { - builder.put("cluster.remote." + CLUSTER_ALIAS + ".mode", "proxy") - .put("cluster.remote." + CLUSTER_ALIAS + ".proxy_address", remoteAddress.toString()) - .putNull("cluster.remote." + CLUSTER_ALIAS + ".seeds"); - } - clusterAdmin().updateSettings(new ClusterUpdateSettingsRequest().persistentSettings(builder)).get(); - } - - private void writeCredentialsToKeyStore(String credentials) throws Exception { - final Environment environment = getInstanceFromNode(Environment.class); - final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); - keyStoreWrapper.setString("cluster.remote." 
+ CLUSTER_ALIAS + ".credentials", credentials.toCharArray()); - keyStoreWrapper.save(environment.configFile(), new char[0], false); - } - - public static MockTransportService startTransport( - final String nodeName, - final ThreadPool threadPool, - final BlockingQueue> capturedHeaders - ) { - boolean success = false; - final Settings settings = Settings.builder() - .put("node.name", nodeName) - .put("remote_cluster_server.enabled", "true") - .put("remote_cluster.port", "0") - .put("xpack.security.remote_cluster_server.ssl.enabled", "false") - .build(); - final MockTransportService service = MockTransportService.createNewService( - settings, - VersionInformation.CURRENT, - TransportVersion.current(), - threadPool, - null - ); - try { - service.registerRequestHandler( - ClusterStateAction.NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - ClusterStateRequest::new, - (request, channel, task) -> { - capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse( - new ClusterStateResponse(ClusterName.DEFAULT, ClusterState.builder(ClusterName.DEFAULT).build(), false) - ); - } - ); - service.registerRequestHandler( - RemoteClusterNodesAction.TYPE.name(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - RemoteClusterNodesAction.Request::new, - (request, channel, task) -> { - capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse(new RemoteClusterNodesAction.Response(List.of())); - } - ); - service.registerRequestHandler( - TransportSearchShardsAction.TYPE.name(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - SearchShardsRequest::new, - (request, channel, task) -> { - capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse(new SearchShardsResponse(List.of(), List.of(), Collections.emptyMap())); - } - ); - service.registerRequestHandler( - TransportSearchAction.TYPE.name(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - SearchRequest::new, - (request, channel, task) -> { - 
capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); - channel.sendResponse( - new SearchResponse( - SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), - InternalAggregations.EMPTY, - null, - false, - null, - null, - 1, - null, - 1, - 1, - 0, - 100, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) - ); - } - ); - service.start(); - service.acceptIncomingRequests(); - success = true; - return service; - } finally { - if (success == false) { - service.close(); - } - } - } - - private void reloadSecureSettings() { - final AtomicReference reloadSettingsError = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - final SecureString emptyPassword = randomBoolean() ? new SecureString(new char[0]) : null; - - final var request = new NodesReloadSecureSettingsRequest(); - try { - request.nodesIds(Strings.EMPTY_ARRAY); - request.setSecureStorePassword(emptyPassword); - client().execute(TransportNodesReloadSecureSettingsAction.TYPE, request, new ActionListener<>() { - @Override - public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { - try { - assertThat(nodesReloadResponse, notNullValue()); - final Map nodesMap = nodesReloadResponse.getNodesMap(); - assertThat(nodesMap.size(), equalTo(1)); - for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { - assertThat(nodeResponse.reloadException(), nullValue()); - } - } catch (final AssertionError e) { - reloadSettingsError.set(e); - } finally { - latch.countDown(); - } - } - - @Override - public void onFailure(Exception e) { - reloadSettingsError.set(new AssertionError("Nodes request failed", e)); - latch.countDown(); - } - }); - } finally { - request.decRef(); - } - safeAwait(latch); - if (reloadSettingsError.get() != null) { - throw reloadSettingsError.get(); - } - } -} diff --git 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index e481cf70b9afe..79cf0cb9f7987 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -81,7 +81,6 @@ public void testFetchAllByEntityWithBrokenScroll() { request.scroll(TimeValue.timeValueHours(10L)); String scrollId = randomAlphaOfLength(5); - SearchHit[] hits = new SearchHit[] { new SearchHit(1), new SearchHit(2) }; Answer returnResponse = invocation -> { @SuppressWarnings("unchecked") @@ -89,7 +88,11 @@ public void testFetchAllByEntityWithBrokenScroll() { ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits(hits, new TotalHits(3, TotalHits.Relation.EQUAL_TO), 1), + SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(1), SearchHit.unpooled(2) }, + new TotalHits(3, TotalHits.Relation.EQUAL_TO), + 1 + ), null, null, false, diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 1329158f57d4d..a693c192f5fd2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -15,10 +15,10 @@ import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder; -import 
org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -810,7 +810,7 @@ private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP private void refreshSecurityIndex() throws Exception { assertBusy(() -> { - final RefreshResponse refreshResponse = indicesAdmin().prepareRefresh(SECURITY_MAIN_ALIAS).get(); + final BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh(SECURITY_MAIN_ALIAS).get(); assertThat(refreshResponse.getFailedShards(), is(0)); }); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index a9af4b4ba104a..57fc1b319fa8a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -85,6 +85,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestHeaderDefinition; +import org.elasticsearch.rest.RestInterceptor; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptService; @@ -260,6 +261,7 @@ import org.elasticsearch.xpack.security.action.user.TransportGetUsersAction; import org.elasticsearch.xpack.security.action.user.TransportHasPrivilegesAction; import org.elasticsearch.xpack.security.action.user.TransportPutUserAction; +import org.elasticsearch.xpack.security.action.user.TransportQueryUserAction; import 
org.elasticsearch.xpack.security.action.user.TransportSetEnabledAction; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; @@ -365,6 +367,7 @@ import org.elasticsearch.xpack.security.rest.action.user.RestHasPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.user.RestProfileHasPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction; +import org.elasticsearch.xpack.security.rest.action.user.RestQueryUserAction; import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction; import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry; import org.elasticsearch.xpack.security.support.ExtensionComponents; @@ -391,7 +394,6 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Predicate; @@ -1316,6 +1318,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(ClearPrivilegesCacheAction.INSTANCE, TransportClearPrivilegesCacheAction.class), new ActionHandler<>(ClearSecurityCacheAction.INSTANCE, TransportClearSecurityCacheAction.class), new ActionHandler<>(GetUsersAction.INSTANCE, TransportGetUsersAction.class), + new ActionHandler<>(ActionTypes.QUERY_USER_ACTION, TransportQueryUserAction.class), new ActionHandler<>(PutUserAction.INSTANCE, TransportPutUserAction.class), new ActionHandler<>(DeleteUserAction.INSTANCE, TransportDeleteUserAction.class), new ActionHandler<>(GetRolesAction.INSTANCE, TransportGetRolesAction.class), @@ -1388,6 +1391,7 @@ public List getActionFilters() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -1406,6 +1410,7 @@ public List 
getRestHandlers( new RestClearApiKeyCacheAction(settings, getLicenseState()), new RestClearServiceAccountTokenStoreCacheAction(settings, getLicenseState()), new RestGetUsersAction(settings, getLicenseState()), + new RestQueryUserAction(settings, getLicenseState()), new RestPutUserAction(settings, getLicenseState()), new RestDeleteUserAction(settings, getLicenseState()), new RestGetRolesAction(settings, getLicenseState()), @@ -1847,13 +1852,12 @@ protected void populatePerRequestThreadContext(RestRequest restRequest, ThreadCo } @Override - public UnaryOperator getRestHandlerInterceptor(ThreadContext threadContext) { - return handler -> new SecurityRestFilter( + public RestInterceptor getRestHandlerInterceptor(ThreadContext threadContext) { + return new SecurityRestFilter( enabled, threadContext, secondayAuthc.get(), auditTrailService.get(), - handler, operatorPrivilegesService.get() ); } @@ -1975,14 +1979,19 @@ private void reloadSharedSecretsForJwtRealms(Settings settingsWithKeystore) { * See {@link TransportReloadRemoteClusterCredentialsAction} for more context. */ private void reloadRemoteClusterCredentials(Settings settingsWithKeystore) { + // Using `settings` instead of `settingsWithKeystore` is deliberate: we are not interested in secure settings here + if (DiscoveryNode.isStateless(settings)) { + // Stateless does not support remote cluster operations. Skip. 
+ return; + } + final PlainActionFuture future = new PlainActionFuture<>(); getClient().execute( ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION, new TransportReloadRemoteClusterCredentialsAction.Request(settingsWithKeystore), future ); - assert future.isDone() : "expecting local-only action call to return immediately on invocation"; - future.actionGet(0, TimeUnit.NANOSECONDS); + future.actionGet(); } static final class ValidateLicenseForFIPS implements BiConsumer { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java index 4077597a7ef16..9d25802544d38 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java @@ -10,9 +10,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.tasks.Task; @@ -27,10 +26,25 @@ import org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators; import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; -public final class TransportQueryApiKeyAction extends HandledTransportAction { +public final class TransportQueryApiKeyAction extends 
TransportAction { + + // API keys with no "type" field are implicitly of type "rest" (this is the case for all API Keys created before v8.9). + // The below runtime field ensures that the "type" field can be used by the {@link RestQueryApiKeyAction}, + // while making the implicit "rest" type feature transparent to the caller (hence all keys are either "rest" + // or "cross_cluster", and the "type" is always set). + // This can be improved, to get rid of the runtime performance impact of the runtime field, by reindexing + // the api key docs and setting the "type" to "rest" if empty. But the infrastructure to run such a maintenance + // task on a system index (once the cluster version permits) is not currently available. + public static final String API_KEY_TYPE_RUNTIME_MAPPING_FIELD = "runtime_key_type"; + private static final Map API_KEY_TYPE_RUNTIME_MAPPING = Map.of( + API_KEY_TYPE_RUNTIME_MAPPING_FIELD, + Map.of("type", "keyword", "script", Map.of("source", "emit(field('type').get(\"rest\"));")) + ); private final ApiKeyService apiKeyService; private final SecurityContext securityContext; @@ -42,7 +56,7 @@ public TransportQueryApiKeyAction( ApiKeyService apiKeyService, SecurityContext context ) { - super(QueryApiKeyAction.NAME, transportService, actionFilters, QueryApiKeyRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(QueryApiKeyAction.NAME, actionFilters, transportService.getTaskManager()); this.apiKeyService = apiKeyService; this.securityContext = context; } @@ -66,12 +80,19 @@ protected void doExecute(Task task, QueryApiKeyRequest request, ActionListener { + if (API_KEY_TYPE_RUNTIME_MAPPING_FIELD.equals(fieldName)) { + accessesApiKeyTypeField.set(true); + } + }, request.isFilterForCurrentUser() ? 
authentication : null); searchSourceBuilder.query(apiKeyBoolQueryBuilder); + // only add the query-level runtime field to the search request if it's actually referring the "type" field + if (accessesApiKeyTypeField.get()) { + searchSourceBuilder.runtimeMappings(API_KEY_TYPE_RUNTIME_MAPPING); + } + if (request.getFieldSortBuilders() != null) { translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java index 7b45313b0e24f..500ef7c51f4bc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java @@ -30,9 +30,7 @@ */ public final class TransportSamlCompleteLogoutAction extends HandledTransportAction { - public static final ActionType TYPE = ActionType.emptyResponse( - "cluster:admin/xpack/security/saml/complete_logout" - ); + public static final ActionType TYPE = new ActionType<>("cluster:admin/xpack/security/saml/complete_logout"); private final Realms realms; @Inject diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java index d6f54e9d3e9e1..22dcf1b4f9daa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java @@ -13,16 +13,22 @@ 
import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.Transports; import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.security.Security; import java.io.IOException; +import java.util.function.Supplier; /** * This is a local-only action which updates remote cluster credentials for remote cluster connections, from keystore settings reloaded via @@ -39,18 +45,38 @@ public class TransportReloadRemoteClusterCredentialsAction extends TransportActi ActionResponse.Empty> { private final RemoteClusterService remoteClusterService; + private final ClusterService clusterService; @Inject - public TransportReloadRemoteClusterCredentialsAction(TransportService transportService, ActionFilters actionFilters) { + public TransportReloadRemoteClusterCredentialsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters + ) { super(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION.name(), actionFilters, transportService.getTaskManager()); this.remoteClusterService = transportService.getRemoteClusterService(); + this.clusterService = clusterService; } @Override protected void doExecute(Task task, Request request, ActionListener listener) { - // We avoid stashing and marking context as system to keep the action as minimal as possible 
(i.e., avoid copying context) - remoteClusterService.updateRemoteClusterCredentials(request.getSettings()); - listener.onResponse(ActionResponse.Empty.INSTANCE); + assert Transports.assertNotTransportThread("Remote connection re-building is too much for a transport thread"); + final ClusterState clusterState = clusterService.state(); + final ClusterBlockException clusterBlockException = checkBlock(clusterState); + if (clusterBlockException != null) { + throw clusterBlockException; + } + // Use a supplier to ensure we resolve cluster settings inside a synchronized block, to prevent race conditions + final Supplier settingsSupplier = () -> { + final Settings persistentSettings = clusterState.metadata().persistentSettings(); + final Settings transientSettings = clusterState.metadata().transientSettings(); + return Settings.builder().put(request.getSettings(), true).put(persistentSettings, false).put(transientSettings, false).build(); + }; + remoteClusterService.updateRemoteClusterCredentials(settingsSupplier, listener.safeMap(ignored -> ActionResponse.Empty.INSTANCE)); + } + + private ClusterBlockException checkBlock(ClusterState clusterState) { + return clusterState.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); } public static class Request extends ActionRequest { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index fc8f931612907..b696c93cf6899 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -26,9 +26,7 @@ public class TransportChangePasswordAction extends HandledTransportAction { - public static final ActionType TYPE = ActionType.emptyResponse( 
- "cluster:admin/xpack/security/user/change_password" - ); + public static final ActionType TYPE = new ActionType<>("cluster:admin/xpack/security/user/change_password"); private final Settings settings; private final NativeUsersStore nativeUsersStore; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java new file mode 100644 index 0000000000000..2a9aef73ff62a --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; +import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; +import org.elasticsearch.xpack.security.support.UserBoolQueryBuilder; + +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static 
org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; +import static org.elasticsearch.xpack.security.support.UserBoolQueryBuilder.USER_FIELD_NAME_TRANSLATOR; + +public final class TransportQueryUserAction extends TransportAction { + private final NativeUsersStore usersStore; + private static final Set FIELD_NAMES_WITH_SORT_SUPPORT = Set.of("username", "roles", "enabled"); + + @Inject + public TransportQueryUserAction(TransportService transportService, ActionFilters actionFilters, NativeUsersStore usersStore) { + super(ActionTypes.QUERY_USER_ACTION.name(), actionFilters, transportService.getTaskManager()); + this.usersStore = usersStore; + } + + @Override + protected void doExecute(Task task, QueryUserRequest request, ActionListener listener) { + final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource() + .version(false) + .fetchSource(true) + .trackTotalHits(true); + + if (request.getFrom() != null) { + searchSourceBuilder.from(request.getFrom()); + } + if (request.getSize() != null) { + searchSourceBuilder.size(request.getSize()); + } + + searchSourceBuilder.query(UserBoolQueryBuilder.build(request.getQueryBuilder())); + + if (request.getFieldSortBuilders() != null) { + translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder); + } + + if (request.getSearchAfterBuilder() != null) { + searchSourceBuilder.searchAfter(request.getSearchAfterBuilder().getSortValues()); + } + + final SearchRequest searchRequest = new SearchRequest(new String[] { SECURITY_MAIN_ALIAS }, searchSourceBuilder); + usersStore.queryUsers(searchRequest, listener); + } + + // package private for testing + static void translateFieldSortBuilders(List fieldSortBuilders, SearchSourceBuilder searchSourceBuilder) { + fieldSortBuilders.forEach(fieldSortBuilder -> { + if (fieldSortBuilder.getNestedSort() != null) { + throw new IllegalArgumentException("nested sorting is not supported for User query"); + } + if 
(FieldSortBuilder.DOC_FIELD_NAME.equals(fieldSortBuilder.getFieldName())) { + searchSourceBuilder.sort(fieldSortBuilder); + } else { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(fieldSortBuilder.getFieldName()); + if (FIELD_NAMES_WITH_SORT_SUPPORT.contains(translatedFieldName) == false) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "sorting is not supported for field [%s] in User query", fieldSortBuilder.getFieldName()) + ); + } + + if (translatedFieldName.equals(fieldSortBuilder.getFieldName())) { + searchSourceBuilder.sort(fieldSortBuilder); + } else { + final FieldSortBuilder translatedFieldSortBuilder = new FieldSortBuilder(translatedFieldName).order( + fieldSortBuilder.order() + ) + .missing(fieldSortBuilder.missing()) + .unmappedType(fieldSortBuilder.unmappedType()) + .setFormat(fieldSortBuilder.getFormat()); + + if (fieldSortBuilder.sortMode() != null) { + translatedFieldSortBuilder.sortMode(fieldSortBuilder.sortMode()); + } + if (fieldSortBuilder.getNumericType() != null) { + translatedFieldSortBuilder.setNumericType(fieldSortBuilder.getNumericType()); + } + searchSourceBuilder.sort(translatedFieldSortBuilder); + } + } + }); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java index 4647ac0cf5f66..70670840a912d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java @@ -28,7 +28,7 @@ */ public class TransportSetEnabledAction extends HandledTransportAction { - public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/xpack/security/user/set_enabled"); + public static final ActionType TYPE = new 
ActionType<>("cluster:admin/xpack/security/user/set_enabled"); private final Settings settings; private final SecurityContext securityContext; private final NativeUsersStore usersStore; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index e2b9c36c1d0ee..87c372f561757 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -1325,6 +1325,10 @@ private static void withBaseUpdateApiKeyFields(final XContentBuilder builder, fi // because it replaces any metadata previously associated with the API key builder.field("metadata", baseUpdateApiKeyRequest.getMetadata()); } + builder.field( + "expiration", + baseUpdateApiKeyRequest.getExpiration() != null ? 
baseUpdateApiKeyRequest.getExpiration().toString() : null + ); } private static void withRoleDescriptor(XContentBuilder builder, RoleDescriptor roleDescriptor) throws IOException { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 7dbe402cea046..e806e11d3f16d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -112,9 +112,9 @@ public AuthenticationService( anonymousUser, new AuthenticationContextSerializer(), new ServiceAccountAuthenticator(serviceAccountService, nodeName, meterRegistry), - new OAuth2TokenAuthenticator(tokenService), + new OAuth2TokenAuthenticator(tokenService, meterRegistry), new ApiKeyAuthenticator(apiKeyService, nodeName, meterRegistry), - new RealmsAuthenticator(numInvalidation, lastSuccessfulAuthCache) + new RealmsAuthenticator(numInvalidation, lastSuccessfulAuthCache, meterRegistry) ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java index 6f50cd1f1db1d..ffe6f83a37b2f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticator.java @@ -12,17 +12,38 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.core.security.authc.Authentication; import 
org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authc.support.BearerToken; +import org.elasticsearch.xpack.security.metric.InstrumentedSecurityActionListener; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; +import org.elasticsearch.xpack.security.metric.SecurityMetrics; + +import java.util.Map; +import java.util.function.LongSupplier; class OAuth2TokenAuthenticator implements Authenticator { + public static final String ATTRIBUTE_AUTHC_FAILURE_REASON = "es.security.token_authc_failure_reason"; + private static final Logger logger = LogManager.getLogger(OAuth2TokenAuthenticator.class); + + private final SecurityMetrics authenticationMetrics; private final TokenService tokenService; - OAuth2TokenAuthenticator(TokenService tokenService) { + OAuth2TokenAuthenticator(TokenService tokenService, MeterRegistry meterRegistry) { + this(tokenService, meterRegistry, System::nanoTime); + } + + OAuth2TokenAuthenticator(TokenService tokenService, MeterRegistry meterRegistry, LongSupplier nanoTimeSupplier) { + this.authenticationMetrics = new SecurityMetrics<>( + SecurityMetricType.AUTHC_OAUTH2_TOKEN, + meterRegistry, + this::buildMetricAttributes, + nanoTimeSupplier + ); this.tokenService = tokenService; } @@ -45,6 +66,10 @@ public void authenticate(Context context, ActionListener> listener) { tokenService.tryAuthenticateToken(bearerToken.credentials(), ActionListener.wrap(userToken -> { if (userToken != null) { listener.onResponse(AuthenticationResult.success(userToken.getAuthentication())); @@ -62,4 +87,11 @@ public void authenticate(Context context, ActionListener buildMetricAttributes(BearerToken token, String failureReason) { + if (failureReason != null) { + return Map.of(ATTRIBUTE_AUTHC_FAILURE_REASON, failureReason); + } + return Map.of(); + } } diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticator.java index 25054982655c4..51af3a7eda665 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticator.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.core.common.IteratingActionListener; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; @@ -25,9 +26,13 @@ import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.support.RealmUserLookup; +import org.elasticsearch.xpack.security.metric.InstrumentedSecurityActionListener; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; +import org.elasticsearch.xpack.security.metric.SecurityMetrics; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -35,19 +40,40 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; +import java.util.function.LongSupplier; import static org.elasticsearch.core.Strings.format; public class RealmsAuthenticator implements Authenticator { + public static final String ATTRIBUTE_REALM_NAME = "es.security.realm_name"; + public static final String ATTRIBUTE_REALM_TYPE = "es.security.realm_type"; + public static final String 
ATTRIBUTE_REALM_AUTHC_FAILURE_REASON = "es.security.realm_authc_failure_reason"; + private static final Logger logger = LogManager.getLogger(RealmsAuthenticator.class); private final AtomicLong numInvalidation; private final Cache lastSuccessfulAuthCache; + private final SecurityMetrics authenticationMetrics; + + public RealmsAuthenticator(AtomicLong numInvalidation, Cache lastSuccessfulAuthCache, MeterRegistry meterRegistry) { + this(numInvalidation, lastSuccessfulAuthCache, meterRegistry, System::nanoTime); + } - public RealmsAuthenticator(AtomicLong numInvalidation, Cache lastSuccessfulAuthCache) { + RealmsAuthenticator( + AtomicLong numInvalidation, + Cache lastSuccessfulAuthCache, + MeterRegistry meterRegistry, + LongSupplier nanoTimeSupplier + ) { this.numInvalidation = numInvalidation; this.lastSuccessfulAuthCache = lastSuccessfulAuthCache; + this.authenticationMetrics = new SecurityMetrics<>( + SecurityMetricType.AUTHC_REALMS, + meterRegistry, + this::buildMetricAttributes, + nanoTimeSupplier + ); } @Override @@ -141,66 +167,69 @@ private void consumeToken(Context context, ActionListener { - assert result != null : "Realm " + realm + " produced a null authentication result"; - logger.debug( - "Authentication of [{}] using realm [{}] with token [{}] was [{}]", - authenticationToken.principal(), - realm, - authenticationToken.getClass().getSimpleName(), - result - ); - if (result.getStatus() == AuthenticationResult.Status.SUCCESS) { - // user was authenticated, populate the authenticated by information - authenticatedByRef.set(realm); - authenticationResultRef.set(result); - if (lastSuccessfulAuthCache != null && startInvalidation == numInvalidation.get()) { - lastSuccessfulAuthCache.put(authenticationToken.principal(), realm); - } - userListener.onResponse(result.getValue()); - } else { - // the user was not authenticated, call this so we can audit the correct event - context.getRequest().realmAuthenticationFailed(authenticationToken, realm.name()); - if 
(result.getStatus() == AuthenticationResult.Status.TERMINATE) { - final var resultException = result.getException(); - if (resultException != null) { - logger.info( - () -> format( - "Authentication of [%s] was terminated by realm [%s] - %s", + realm.authenticate( + authenticationToken, + InstrumentedSecurityActionListener.wrapForAuthc(authenticationMetrics, realm, ActionListener.wrap(result -> { + assert result != null : "Realm " + realm + " produced a null authentication result"; + logger.debug( + "Authentication of [{}] using realm [{}] with token [{}] was [{}]", + authenticationToken.principal(), + realm, + authenticationToken.getClass().getSimpleName(), + result + ); + if (result.getStatus() == AuthenticationResult.Status.SUCCESS) { + // user was authenticated, populate the authenticated by information + authenticatedByRef.set(realm); + authenticationResultRef.set(result); + if (lastSuccessfulAuthCache != null && startInvalidation == numInvalidation.get()) { + lastSuccessfulAuthCache.put(authenticationToken.principal(), realm); + } + userListener.onResponse(result.getValue()); + } else { + // the user was not authenticated, call this so we can audit the correct event + context.getRequest().realmAuthenticationFailed(authenticationToken, realm.name()); + if (result.getStatus() == AuthenticationResult.Status.TERMINATE) { + final var resultException = result.getException(); + if (resultException != null) { + logger.info( + () -> format( + "Authentication of [%s] was terminated by realm [%s] - %s", + authenticationToken.principal(), + realm.name(), + result.getMessage() + ), + resultException + ); + userListener.onFailure(resultException); + } else { + logger.info( + "Authentication of [{}] was terminated by realm [{}] - {}", authenticationToken.principal(), realm.name(), result.getMessage() - ), - resultException - ); - userListener.onFailure(resultException); + ); + userListener.onFailure(AuthenticationTerminatedSuccessfullyException.INSTANCE); + } } else { - 
logger.info( - "Authentication of [{}] was terminated by realm [{}] - {}", - authenticationToken.principal(), - realm.name(), - result.getMessage() - ); - userListener.onFailure(AuthenticationTerminatedSuccessfullyException.INSTANCE); - } - } else { - if (result.getMessage() != null) { - messages.put(realm, new Tuple<>(result.getMessage(), result.getException())); + if (result.getMessage() != null) { + messages.put(realm, new Tuple<>(result.getMessage(), result.getException())); + } + userListener.onResponse(null); } - userListener.onResponse(null); } - } - }, (ex) -> { - logger.warn( - () -> format( - "An error occurred while attempting to authenticate [%s] against realm [%s]", - authenticationToken.principal(), - realm.name() - ), - ex - ); - userListener.onFailure(ex); - })); + }, (ex) -> { + logger.warn( + () -> format( + "An error occurred while attempting to authenticate [%s] against realm [%s]", + authenticationToken.principal(), + realm.name() + ), + ex + ); + userListener.onFailure(ex); + })) + ); } else { userListener.onResponse(null); } @@ -362,4 +391,14 @@ public synchronized Throwable fillInStackTrace() { return this; } } + + private Map buildMetricAttributes(Realm realm, String failureReason) { + final Map attributes = new HashMap<>(failureReason != null ? 
3 : 2); + attributes.put(ATTRIBUTE_REALM_NAME, realm.name()); + attributes.put(ATTRIBUTE_REALM_TYPE, realm.type()); + if (failureReason != null) { + attributes.put(ATTRIBUTE_REALM_AUTHC_FAILURE_REASON, failureReason); + } + return attributes; + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 9c378e0e1156e..26f6268aaa5dc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -154,7 +154,7 @@ * Service responsible for the creation, validation, and other management of {@link UserToken} * objects for authentication */ -public final class TokenService { +public class TokenService { /** * The parameters below are used to generate the cryptographic key that is used to encrypt the @@ -210,7 +210,7 @@ public final class TokenService { static final TransportVersion VERSION_ACCESS_TOKENS_AS_UUIDS = TransportVersions.V_7_2_0; static final TransportVersion VERSION_MULTIPLE_CONCURRENT_REFRESHES = TransportVersions.V_7_2_0; static final TransportVersion VERSION_CLIENT_AUTH_FOR_REFRESH = TransportVersions.V_8_2_0; - static final TransportVersion VERSION_GET_TOKEN_DOC_FOR_REFRESH = TransportVersions.V_8_500_061; + static final TransportVersion VERSION_GET_TOKEN_DOC_FOR_REFRESH = TransportVersions.V_8_10_X; private static final Logger logger = LogManager.getLogger(TokenService.class); @@ -234,6 +234,7 @@ public final class TokenService { /** * Creates a new token service */ + @SuppressWarnings("this-escape") public TokenService( Settings settings, Clock clock, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 36f78682b6bd1..81aa487f73e2c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -43,6 +44,7 @@ import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -57,6 +59,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Consumer; import java.util.function.Supplier; @@ -161,6 +164,40 @@ public void getUsers(String[] userNames, final ActionListener> } } + public void queryUsers(SearchRequest searchRequest, ActionListener listener) { + final SecurityIndexManager frozenSecurityIndex = securityIndex.defensiveCopy(); + if (frozenSecurityIndex.indexExists() == false) { + logger.debug("security index does not exist"); + listener.onResponse(QueryUserResponse.emptyResponse()); + } else if 
(frozenSecurityIndex.isAvailable(SEARCH_SHARDS) == false) { + listener.onFailure(frozenSecurityIndex.getUnavailableReason(SEARCH_SHARDS)); + } else { + securityIndex.checkIndexVersionThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( + client, + SECURITY_ORIGIN, + TransportSearchAction.TYPE, + searchRequest, + ActionListener.wrap(searchResponse -> { + final long total = searchResponse.getHits().getTotalHits().value; + if (total == 0) { + logger.debug("No users found for query [{}]", searchRequest.source().query()); + listener.onResponse(QueryUserResponse.emptyResponse()); + return; + } + + final List userItem = Arrays.stream(searchResponse.getHits().getHits()).map(hit -> { + UserAndPassword userAndPassword = transformUser(hit.getId(), hit.getSourceAsMap()); + return userAndPassword != null ? new QueryUserResponse.Item(userAndPassword.user(), hit.getSortValues()) : null; + }).filter(Objects::nonNull).toList(); + listener.onResponse(new QueryUserResponse(total, userItem)); + }, listener::onFailure) + ) + ); + } + } + void getUserCount(final ActionListener listener) { final SecurityIndexManager frozenSecurityIndex = this.securityIndex.defensiveCopy(); if (frozenSecurityIndex.indexExists() == false) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java index 777fe5f71b0a0..abd586920f2d8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java @@ -26,7 +26,7 @@ final class ElasticServiceAccounts { "enterprise-search-server", new RoleDescriptor( NAMESPACE + "/enterprise-search-server", - new String[] { "manage", "manage_security" }, + new String[] { "manage", "manage_security", 
"read_connector_secrets", "write_connector_secrets" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder() .indices( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index e4e9bc453ee83..c8920d0f498d0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -778,7 +778,7 @@ private void authorizeBulkItems( } return resolved; }); - actionToIndicesMap.compute(itemAction, (ignore, resolvedIndicesSet) -> addToOrCreateSet(resolvedIndicesSet, resolvedIndex)); + actionToIndicesMap.computeIfAbsent(itemAction, k -> new HashSet<>()).add(resolvedIndex); } final ActionListener>> bulkAuthzListener = ActionListener.wrap( @@ -800,15 +800,9 @@ private void authorizeBulkItems( final String resolvedIndex = resolvedIndexNames.get(item.index()); final String itemAction = getAction(item); if (actionToIndicesAccessControl.get(itemAction).hasIndexPermissions(resolvedIndex)) { - actionToGrantedIndicesMap.compute( - itemAction, - (ignore, resolvedIndicesSet) -> addToOrCreateSet(resolvedIndicesSet, resolvedIndex) - ); + actionToGrantedIndicesMap.computeIfAbsent(itemAction, ignore -> new HashSet<>()).add(resolvedIndex); } else { - actionToDeniedIndicesMap.compute( - itemAction, - (ignore, resolvedIndicesSet) -> addToOrCreateSet(resolvedIndicesSet, resolvedIndex) - ); + actionToDeniedIndicesMap.computeIfAbsent(itemAction, ignore -> new HashSet<>()).add(resolvedIndex); item.abort( resolvedIndex, actionDenied( @@ -876,12 +870,6 @@ private void authorizeBulkItems( }, listener::onFailure)); } - private static Set addToOrCreateSet(Set set, String item) { - final Set localSet = set != null ? 
set : new HashSet<>(4); - localSet.add(item); - return localSet; - } - private static String resolveIndexNameDateMath(BulkItemRequest bulkItemRequest) { final ResolvedIndices resolvedIndices = IndicesAndAliasesResolver.resolveIndicesAndAliasesWithoutWildcards( getAction(bulkItemRequest), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java index d6a85b4e9ddf6..02ac292aee781 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java @@ -38,6 +38,24 @@ public enum SecurityMetricType { ) ), + AUTHC_OAUTH2_TOKEN( + SecurityMetricGroup.AUTHC, + new SecurityMetricInfo("es.security.authc.token.success.total", "Number of successful OAuth2 token authentications.", "count"), + new SecurityMetricInfo("es.security.authc.token.failures.total", "Number of failed OAuth2 token authentications.", "count"), + new SecurityMetricInfo( + "es.security.authc.token.time", + "Time it took (in nanoseconds) to execute OAuth2 token authentication.", + "ns" + ) + ), + + AUTHC_REALMS( + SecurityMetricGroup.AUTHC, + new SecurityMetricInfo("es.security.authc.realms.success.total", "Number of successful realm authentications.", "count"), + new SecurityMetricInfo("es.security.authc.realms.failures.total", "Number of failed realm authentications.", "count"), + new SecurityMetricInfo("es.security.authc.realms.time", "Time it took (in nanoseconds) to execute realm authentication.", "ns") + ), + ; private final SecurityMetricGroup group; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java index f7d5ada9b9538..6c3c25a951744 
100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java @@ -8,18 +8,15 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.rest.FilterRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestInterceptor; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.rest.RestRequestFilter; -import org.elasticsearch.rest.RestResponse; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.authz.restriction.WorkflowService; @@ -27,7 +24,7 @@ import static org.elasticsearch.core.Strings.format; -public class SecurityRestFilter extends FilterRestHandler implements RestHandler { +public class SecurityRestFilter implements RestInterceptor { private static final Logger logger = LogManager.getLogger(SecurityRestFilter.class); @@ -42,10 +39,8 @@ public SecurityRestFilter( ThreadContext threadContext, SecondaryAuthenticator secondaryAuthenticator, AuditTrailService auditTrailService, - RestHandler restHandler, OperatorPrivileges.OperatorPrivilegesService operatorPrivilegesService ) { - super(restHandler); this.enabled = enabled; this.threadContext = threadContext; this.secondaryAuthenticator = secondaryAuthenticator; @@ -57,57 +52,52 @@ public SecurityRestFilter( } @Override - public void handleRequest(RestRequest request, 
RestChannel channel, NodeClient client) throws Exception { + public void intercept(RestRequest request, RestChannel channel, RestHandler targetHandler, ActionListener listener) + throws Exception { // requests with the OPTIONS method should be handled elsewhere, and not by calling {@code RestHandler#handleRequest} // authn is bypassed for HTTP requests with the OPTIONS method, so this sanity check prevents dispatching unauthenticated requests if (request.method() == Method.OPTIONS) { handleException( request, - channel, - new ElasticsearchSecurityException("Cannot dispatch OPTIONS request, as they are not authenticated") + new ElasticsearchSecurityException("Cannot dispatch OPTIONS request, as they are not authenticated"), + listener ); return; } if (enabled == false) { - doHandleRequest(request, channel, client); + listener.onResponse(Boolean.TRUE); return; } - final RestRequest wrappedRequest = maybeWrapRestRequest(request); + final RestRequest wrappedRequest = maybeWrapRestRequest(request, targetHandler); auditTrailService.get().authenticationSuccess(wrappedRequest); secondaryAuthenticator.authenticateAndAttachToContext(wrappedRequest, ActionListener.wrap(secondaryAuthentication -> { if (secondaryAuthentication != null) { logger.trace("Found secondary authentication {} in REST request [{}]", secondaryAuthentication, request.uri()); } - WorkflowService.resolveWorkflowAndStoreInThreadContext(getConcreteRestHandler(), threadContext); + WorkflowService.resolveWorkflowAndStoreInThreadContext(targetHandler, threadContext); - doHandleRequest(request, channel, client); - }, e -> handleException(request, channel, e))); + doHandleRequest(request, channel, targetHandler, listener); + }, e -> handleException(request, e, listener))); } - private void doHandleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + private void doHandleRequest(RestRequest request, RestChannel channel, RestHandler targetHandler, ActionListener listener) { 
threadContext.sanitizeHeaders(); // operator privileges can short circuit to return a non-successful response - if (operatorPrivilegesService.checkRest(getConcreteRestHandler(), request, channel, threadContext)) { - try { - getDelegate().handleRequest(request, channel, client); - } catch (Exception e) { - logger.debug(() -> format("Request handling failed for REST request [%s]", request.uri()), e); - throw e; - } + if (operatorPrivilegesService.checkRest(targetHandler, request, channel, threadContext)) { + listener.onResponse(Boolean.TRUE); + } else { + // The service sends its own response if it returns `false`. + // That's kind of ugly, and it would be better if we throw an exception and let the rest controller serialize it as normal + listener.onResponse(Boolean.FALSE); } } - protected void handleException(RestRequest request, RestChannel channel, Exception e) { + protected void handleException(RestRequest request, Exception e, ActionListener listener) { logger.debug(() -> format("failed for REST request [%s]", request.uri()), e); threadContext.sanitizeHeaders(); - try { - channel.sendResponse(new RestResponse(channel, e)); - } catch (Exception inner) { - inner.addSuppressed(e); - logger.error((Supplier) () -> "failed to send failure response for uri [" + request.uri() + "]", inner); - } + listener.onFailure(e); } // for testing @@ -115,8 +105,8 @@ OperatorPrivileges.OperatorPrivilegesService getOperatorPrivilegesService() { return operatorPrivilegesService; } - private RestRequest maybeWrapRestRequest(RestRequest restRequest) { - if (getConcreteRestHandler() instanceof RestRequestFilter rrf) { + private RestRequest maybeWrapRestRequest(RestRequest restRequest, RestHandler targetHandler) { + if (targetHandler instanceof RestRequestFilter rrf) { return rrf.getFilteredRequest(restRequest); } return restRequest; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java new file mode 100644 index 0000000000000..407fe36fa82d3 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.rest.action.user; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.index.query.AbstractQueryBuilder.parseTopLevelQuery; +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Rest action 
to search for Users + */ +public final class RestQueryUserAction extends SecurityBaseRestHandler { + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "query_user_request_payload", + a -> new Payload((QueryBuilder) a[0], (Integer) a[1], (Integer) a[2], (List) a[3], (SearchAfterBuilder) a[4]) + ); + + static { + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseTopLevelQuery(p), new ParseField("query")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("from")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("size")); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return new FieldSortBuilder(p.text()); + } else if (p.currentToken() == XContentParser.Token.START_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p); + final FieldSortBuilder fieldSortBuilder = FieldSortBuilder.fromXContent(p, p.currentName()); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p); + return fieldSortBuilder; + } else { + throw new IllegalArgumentException("mal-formatted sort object"); + } + }, new ParseField("sort")); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> SearchAfterBuilder.fromXContent(p), + new ParseField("search_after"), + ObjectParser.ValueType.VALUE_ARRAY + ); + } + + /** + * @param settings the node's settings + * @param licenseState the license state that will be used to determine if + * security is licensed + */ + public RestQueryUserAction(Settings settings, XPackLicenseState licenseState) { + super(settings, licenseState); + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_security/_query/user"), new Route(POST, "/_security/_query/user")); + } + + @Override + public String getName() { + return "xpack_security_query_user"; + } + + @Override + 
protected RestChannelConsumer innerPrepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final QueryUserRequest queryUserRequest; + if (request.hasContentOrSourceParam()) { + final Payload payload = PARSER.parse(request.contentOrSourceParamParser(), null); + queryUserRequest = new QueryUserRequest( + payload.queryBuilder, + payload.from, + payload.size, + payload.fieldSortBuilders, + payload.searchAfterBuilder + ); + } else { + queryUserRequest = new QueryUserRequest(null, null, null, null, null); + } + return channel -> client.execute(ActionTypes.QUERY_USER_ACTION, queryUserRequest, new RestToXContentListener<>(channel)); + } + + private record Payload( + @Nullable QueryBuilder queryBuilder, + @Nullable Integer from, + @Nullable Integer size, + @Nullable List fieldSortBuilders, + @Nullable SearchAfterBuilder searchAfterBuilder + ) {} +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java index 28ecd5ffe5b57..9f7b84e4a2698 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java @@ -13,21 +13,29 @@ import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; +import 
org.elasticsearch.index.query.SimpleQueryStringBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.index.search.QueryParserHelper; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.authc.ApiKeyService; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import java.util.Set; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD; public class ApiKeyBoolQueryBuilder extends BoolQueryBuilder { @@ -36,10 +44,15 @@ public class ApiKeyBoolQueryBuilder extends BoolQueryBuilder { "_id", "doc_type", "name", + "type", + API_KEY_TYPE_RUNTIME_MAPPING_FIELD, "api_key_invalidated", "invalidation_time", "creation_time", - "expiration_time" + "expiration_time", + "metadata_flattened", + "creator.principal", + "creator.realm" ); private ApiKeyBoolQueryBuilder() {} @@ -56,17 +69,23 @@ private ApiKeyBoolQueryBuilder() {} * * @param queryBuilder This represents the query parsed directly from the user input. It is validated * and transformed (see above). + * @param fieldNameVisitor This {@code Consumer} is invoked with all the (index-level) field names referred to in the passed-in query. * @param authentication The user's authentication object. If present, it will be used to filter the results * to only include API keys owned by the user. * @return A specialised query builder for API keys that is safe to run on the security index. 
*/ - public static ApiKeyBoolQueryBuilder build(QueryBuilder queryBuilder, @Nullable Authentication authentication) { + public static ApiKeyBoolQueryBuilder build( + QueryBuilder queryBuilder, + Consumer fieldNameVisitor, + @Nullable Authentication authentication + ) { final ApiKeyBoolQueryBuilder finalQuery = new ApiKeyBoolQueryBuilder(); if (queryBuilder != null) { - QueryBuilder processedQuery = doProcess(queryBuilder); + QueryBuilder processedQuery = doProcess(queryBuilder, fieldNameVisitor); finalQuery.must(processedQuery); } finalQuery.filter(QueryBuilders.termQuery("doc_type", "api_key")); + fieldNameVisitor.accept("doc_type"); if (authentication != null) { if (authentication.isApiKey()) { @@ -77,8 +96,10 @@ public static ApiKeyBoolQueryBuilder build(QueryBuilder queryBuilder, @Nullable finalQuery.filter(QueryBuilders.idsQuery().addIds(apiKeyId)); } else { finalQuery.filter(QueryBuilders.termQuery("creator.principal", authentication.getEffectiveSubject().getUser().principal())); + fieldNameVisitor.accept("creator.principal"); final String[] realms = ApiKeyService.getOwnersRealmNames(authentication); final QueryBuilder realmsQuery = ApiKeyService.filterForRealmNames(realms); + fieldNameVisitor.accept("creator.realm"); assert realmsQuery != null; finalQuery.filter(realmsQuery); } @@ -86,15 +107,15 @@ public static ApiKeyBoolQueryBuilder build(QueryBuilder queryBuilder, @Nullable return finalQuery; } - private static QueryBuilder doProcess(QueryBuilder qb) { + private static QueryBuilder doProcess(QueryBuilder qb, Consumer fieldNameVisitor) { if (qb instanceof final BoolQueryBuilder query) { final BoolQueryBuilder newQuery = QueryBuilders.boolQuery() .minimumShouldMatch(query.minimumShouldMatch()) .adjustPureNegative(query.adjustPureNegative()); - query.must().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::must); - query.should().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::should); - 
query.mustNot().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::mustNot); - query.filter().stream().map(ApiKeyBoolQueryBuilder::doProcess).forEach(newQuery::filter); + query.must().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::must); + query.should().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::should); + query.mustNot().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::mustNot); + query.filter().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::filter); return newQuery; } else if (qb instanceof MatchAllQueryBuilder) { return qb; @@ -102,29 +123,35 @@ private static QueryBuilder doProcess(QueryBuilder qb) { return qb; } else if (qb instanceof final TermQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.termQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); } else if (qb instanceof final ExistsQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.existsQuery(translatedFieldName); } else if (qb instanceof final TermsQueryBuilder query) { if (query.termsLookup() != null) { throw new IllegalArgumentException("terms query with terms lookup is not supported for API Key query"); } final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.termsQuery(translatedFieldName, query.getValues()); } else if (qb instanceof final PrefixQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return 
QueryBuilders.prefixQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); } else if (qb instanceof final WildcardQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.wildcardQuery(translatedFieldName, query.value()) .caseInsensitive(query.caseInsensitive()) .rewrite(query.rewrite()); } else if (qb instanceof final RangeQueryBuilder query) { - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); if (query.relation() != null) { throw new IllegalArgumentException("range query with relation is not supported for API Key query"); } + final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); final RangeQueryBuilder newQuery = QueryBuilders.rangeQuery(translatedFieldName); if (query.format() != null) { newQuery.format(query.format()); @@ -139,6 +166,36 @@ private static QueryBuilder doProcess(QueryBuilder qb) { newQuery.to(query.to()).includeUpper(query.includeUpper()); } return newQuery.boost(query.boost()); + } else if (qb instanceof final SimpleQueryStringBuilder simpleQueryStringBuilder) { + if (simpleQueryStringBuilder.fields().isEmpty()) { + simpleQueryStringBuilder.field("*"); + } + // override lenient if querying all the fields, because, due to different field mappings, + // the query parsing will almost certainly fail otherwise + if (QueryParserHelper.hasAllFieldsWildcard(simpleQueryStringBuilder.fields().keySet())) { + simpleQueryStringBuilder.lenient(true); + } + Map requestedFields = new HashMap<>(simpleQueryStringBuilder.fields()); + simpleQueryStringBuilder.fields().clear(); + for (Map.Entry requestedFieldNameOrPattern : requestedFields.entrySet()) { + for (String translatedField : ApiKeyFieldNameTranslators.translatePattern(requestedFieldNameOrPattern.getKey())) { + 
simpleQueryStringBuilder.fields() + .compute( + translatedField, + (k, v) -> (v == null) ? requestedFieldNameOrPattern.getValue() : v * requestedFieldNameOrPattern.getValue() + ); + fieldNameVisitor.accept(translatedField); + } + } + if (simpleQueryStringBuilder.fields().isEmpty()) { + // A SimpleQueryStringBuilder with empty fields() will eventually produce a SimpleQueryString query + // that accesses all the fields, including disallowed ones. + // Instead, the behavior we're after is that a query that accesses only disallowed fields should + // not match any docs. + return new MatchNoneQueryBuilder(); + } else { + return simpleQueryStringBuilder; + } } else { throw new IllegalArgumentException("Query type [" + qb.getName() + "] is not supported for API Key query"); } @@ -159,9 +216,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws } static boolean isIndexFieldNameAllowed(String fieldName) { - return ALLOWED_EXACT_INDEX_FIELD_NAMES.contains(fieldName) - || fieldName.startsWith("metadata_flattened.") - || fieldName.startsWith("creator."); + return ALLOWED_EXACT_INDEX_FIELD_NAMES.contains(fieldName) || fieldName.startsWith("metadata_flattened."); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java index 4d7cc9d978cd4..29bf3ca5dd045 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java @@ -7,9 +7,15 @@ package org.elasticsearch.xpack.security.support; +import org.elasticsearch.common.regex.Regex; + +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.function.Function; +import static 
org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD; + /** * A class to translate query level field names to index level field names. */ @@ -20,12 +26,15 @@ public class ApiKeyFieldNameTranslators { FIELD_NAME_TRANSLATORS = List.of( new ExactFieldNameTranslator(s -> "creator.principal", "username"), new ExactFieldNameTranslator(s -> "creator.realm", "realm_name"), - new ExactFieldNameTranslator(Function.identity(), "name"), + new ExactFieldNameTranslator(s -> "name", "name"), + new ExactFieldNameTranslator(s -> API_KEY_TYPE_RUNTIME_MAPPING_FIELD, "type"), new ExactFieldNameTranslator(s -> "creation_time", "creation"), new ExactFieldNameTranslator(s -> "expiration_time", "expiration"), new ExactFieldNameTranslator(s -> "api_key_invalidated", "invalidated"), new ExactFieldNameTranslator(s -> "invalidation_time", "invalidation"), - new PrefixFieldNameTranslator(s -> "metadata_flattened" + s.substring(8), "metadata.") + // allows querying on all metadata values as keywords because "metadata_flattened" is a flattened field type + new ExactFieldNameTranslator(s -> "metadata_flattened", "metadata"), + new PrefixFieldNameTranslator(s -> "metadata_flattened." + s.substring("metadata.".length()), "metadata.") ); } @@ -34,6 +43,9 @@ public class ApiKeyFieldNameTranslators { * It throws an exception if the field name is not explicitly allowed. 
*/ public static String translate(String fieldName) { + if (Regex.isSimpleMatchPattern(fieldName)) { + throw new IllegalArgumentException("Field name pattern [" + fieldName + "] is not allowed for API Key query"); + } for (FieldNameTranslator translator : FIELD_NAME_TRANSLATORS) { if (translator.supports(fieldName)) { return translator.translate(fieldName); @@ -42,6 +54,25 @@ public static String translate(String fieldName) { throw new IllegalArgumentException("Field [" + fieldName + "] is not allowed for API Key query"); } + /** + * Translates a query level field name pattern to the matching index level field names. + * The result can be the empty set, if the pattern doesn't match any of the allowed index level field names. + * If the pattern is actually a concrete field name rather than a pattern, + * it is also translated, but only if the query level field name is allowed, otherwise an exception is thrown. + */ + public static Set translatePattern(String fieldNameOrPattern) { + Set indexFieldNames = new HashSet<>(); + for (FieldNameTranslator translator : FIELD_NAME_TRANSLATORS) { + if (translator.supports(fieldNameOrPattern)) { + indexFieldNames.add(translator.translate(fieldNameOrPattern)); + } + } + // It's OK to "translate" to the empty set the concrete disallowed or unknown field names, because + // the SimpleQueryString query type is lenient in the sense that it ignores unknown fields and field name patterns, + // so this preprocessing can ignore them too. 
+ return indexFieldNames; + } + abstract static class FieldNameTranslator { private final Function translationFunc; @@ -66,8 +97,12 @@ static class ExactFieldNameTranslator extends FieldNameTranslator { } @Override - public boolean supports(String fieldName) { - return name.equals(fieldName); + public boolean supports(String fieldNameOrPattern) { + if (Regex.isSimpleMatchPattern(fieldNameOrPattern)) { + return Regex.simpleMatch(fieldNameOrPattern, name); + } else { + return name.equals(fieldNameOrPattern); + } } } @@ -80,8 +115,16 @@ static class PrefixFieldNameTranslator extends FieldNameTranslator { } @Override - boolean supports(String fieldName) { - return fieldName.startsWith(prefix); + boolean supports(String fieldNamePrefix) { + // a pattern can generally match a prefix in multiple ways + // moreover, it's not possible to iterate the concrete fields matching the prefix + if (Regex.isSimpleMatchPattern(fieldNamePrefix)) { + // this means that e.g. `metadata.*` and `metadata.x*` are expanded to the empty list, + // rather than be replaced with `metadata_flattened.*` and `metadata_flattened.x*` + // (but, in any case, `metadata_flattened.*` and `metadata.x*` are going to be ignored) + return false; + } + return fieldNamePrefix.startsWith(prefix); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java new file mode 100644 index 0000000000000..291d55b7b0837 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.support; + +import java.util.List; +import java.util.function.Function; +import java.util.function.Predicate; + +public class SecurityIndexFieldNameTranslator { + + private final List fieldNameTranslators; + + public SecurityIndexFieldNameTranslator(List fieldNameTranslators) { + this.fieldNameTranslators = fieldNameTranslators; + } + + public String translate(String queryFieldName) { + for (FieldName fieldName : this.fieldNameTranslators) { + if (fieldName.supportsQueryName(queryFieldName)) { + return fieldName.indexFieldName(queryFieldName); + } + } + throw new IllegalArgumentException("Field [" + queryFieldName + "] is not allowed"); + } + + public boolean supportedIndexFieldName(String indexFieldName) { + for (FieldName fieldName : this.fieldNameTranslators) { + if (fieldName.supportsIndexName(indexFieldName)) { + return true; + } + } + return false; + } + + public static FieldName exact(String name) { + return exact(name, Function.identity()); + } + + public static FieldName exact(String name, Function translation) { + return new SecurityIndexFieldNameTranslator.ExactFieldName(name, translation); + } + + public abstract static class FieldName { + private final Function toIndexFieldName; + protected final Predicate validIndexNamePredicate; + + FieldName(Function toIndexFieldName, Predicate validIndexNamePredicate) { + this.toIndexFieldName = toIndexFieldName; + this.validIndexNamePredicate = validIndexNamePredicate; + } + + public abstract boolean supportsQueryName(String queryFieldName); + + public abstract boolean supportsIndexName(String indexFieldName); + + public String indexFieldName(String queryFieldName) { + return toIndexFieldName.apply(queryFieldName); + } + } + + private static class ExactFieldName extends FieldName { + private final String name; + + private ExactFieldName(String name, Function toIndexFieldName) { + super(toIndexFieldName, fieldName -> toIndexFieldName.apply(name).equals(fieldName)); + 
this.name = name; + } + + @Override + public boolean supportsQueryName(String queryFieldName) { + return queryFieldName.equals(name); + } + + @Override + public boolean supportsIndexName(String indexFieldName) { + return validIndexNamePredicate.test(indexFieldName); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilder.java new file mode 100644 index 0000000000000..5d3824ab1f8ce --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilder.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.support; + +import org.apache.lucene.search.Query; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.ExistsQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.PrefixQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.security.support.SecurityIndexFieldNameTranslator.exact; + +public class UserBoolQueryBuilder extends BoolQueryBuilder { + public static final SecurityIndexFieldNameTranslator USER_FIELD_NAME_TRANSLATOR = new 
SecurityIndexFieldNameTranslator( + List.of(exact("username"), exact("roles"), exact("full_name"), exact("email"), exact("enabled")) + ); + + private UserBoolQueryBuilder() {} + + public static UserBoolQueryBuilder build(QueryBuilder queryBuilder) { + UserBoolQueryBuilder userQueryBuilder = new UserBoolQueryBuilder(); + if (queryBuilder != null) { + QueryBuilder translaterdQueryBuilder = translateToUserQueryBuilder(queryBuilder); + userQueryBuilder.must(translaterdQueryBuilder); + } + userQueryBuilder.filter(QueryBuilders.termQuery("type", "user")); + + return userQueryBuilder; + } + + private static QueryBuilder translateToUserQueryBuilder(QueryBuilder qb) { + if (qb instanceof final BoolQueryBuilder query) { + final BoolQueryBuilder newQuery = QueryBuilders.boolQuery() + .minimumShouldMatch(query.minimumShouldMatch()) + .adjustPureNegative(query.adjustPureNegative()); + query.must().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::must); + query.should().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::should); + query.mustNot().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::mustNot); + query.filter().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::filter); + return newQuery; + } else if (qb instanceof MatchAllQueryBuilder) { + return qb; + } else if (qb instanceof final TermQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.termQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); + } else if (qb instanceof final ExistsQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.existsQuery(translatedFieldName); + } else if (qb instanceof final TermsQueryBuilder query) { + if (query.termsLookup() != null) { + throw new 
IllegalArgumentException("Terms query with terms lookup is not supported for User query"); + } + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.termsQuery(translatedFieldName, query.getValues()); + } else if (qb instanceof final PrefixQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.prefixQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); + } else if (qb instanceof final WildcardQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.wildcardQuery(translatedFieldName, query.value()) + .caseInsensitive(query.caseInsensitive()) + .rewrite(query.rewrite()); + } else { + throw new IllegalArgumentException("Query type [" + qb.getName() + "] is not supported for User query"); + } + } + + @Override + protected Query doToQuery(SearchExecutionContext context) throws IOException { + context.setAllowedFields(this::isIndexFieldNameAllowed); + return super.doToQuery(context); + } + + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + if (queryRewriteContext instanceof SearchExecutionContext) { + ((SearchExecutionContext) queryRewriteContext).setAllowedFields(this::isIndexFieldNameAllowed); + } + return super.doRewrite(queryRewriteContext); + } + + boolean isIndexFieldNameAllowed(String queryFieldName) { + // Type is needed to filter on user doc type + return queryFieldName.equals("type") || USER_FIELD_NAME_TRANSLATOR.supportedIndexFieldName(queryFieldName); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java index a2aa04e0f56c3..064c38557f6cf 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.security; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -26,7 +27,9 @@ import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -53,7 +56,7 @@ public SecurityTransportXPackUsageAction( } @Override - protected List usageActions() { + protected List> usageActions() { return Collections.singletonList(XPackUsageFeatureAction.SECURITY); } } @@ -70,7 +73,7 @@ public SecurityTransportXPackInfoAction( } @Override - protected List infoActions() { + protected List> infoActions() { return Collections.singletonList(XPackInfoFeatureAction.SECURITY); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 1735b9443c78f..6cd12858a12c1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -806,6 +806,7 @@ public void 
testSecurityRestHandlerInterceptorCanBeInstalled() throws IllegalAcc ActionModule actionModule = new ActionModule( settingsModule.getSettings(), TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()), + null, settingsModule.getIndexScopedSettings(), settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index a088e6c61822a..4127b8cdad32b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -34,6 +33,7 @@ import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; @@ -197,25 +197,30 @@ protected void SearchRequest searchRequest = (SearchRequest) request; searchRequests.add(searchRequest); final SearchHit[] hits = searchFunction.apply(searchRequest); - 
ActionListener.respondAndRelease( - listener, - (Response) new SearchResponse( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, - null, - false, - false, - null, - 1, - "_scrollId1", - 1, - 1, - 0, - 1, - null, - null - ) - ); + final var searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f); + try { + ActionListener.respondAndRelease( + listener, + (Response) new SearchResponse( + searchHits, + null, + null, + false, + false, + null, + 1, + "_scrollId1", + 1, + 1, + 0, + 1, + null, + null + ) + ); + } finally { + searchHits.decRef(); + } } else if (TransportSearchScrollAction.TYPE.name().equals(action.name())) { assertThat(request, instanceOf(SearchScrollRequest.class)); ActionListener.respondAndRelease( @@ -245,7 +250,7 @@ protected void listener.onResponse((Response) response); } else if (RefreshAction.NAME.equals(action.name())) { assertThat(request, instanceOf(RefreshRequest.class)); - listener.onResponse((Response) mock(RefreshResponse.class)); + listener.onResponse((Response) mock(BroadcastResponse.class)); } else { super.doExecute(action, request, listener); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java new file mode 100644 index 0000000000000..aa5f935998757 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.NestedSortBuilder; +import org.elasticsearch.search.sort.SortMode; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Locale; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; + +public class TransportQueryUserActionTests extends ESTestCase { + private static final String[] allowedIndexFieldNames = new String[] { "username", "roles", "enabled" }; + + public void testTranslateFieldSortBuilders() { + final List fieldNames = List.of(allowedIndexFieldNames); + + final List originals = fieldNames.stream().map(this::randomFieldSortBuilderWithName).toList(); + + final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); + TransportQueryUserAction.translateFieldSortBuilders(originals, searchSourceBuilder); + + IntStream.range(0, originals.size()).forEach(i -> { + final FieldSortBuilder original = originals.get(i); + final FieldSortBuilder translated = (FieldSortBuilder) searchSourceBuilder.sorts().get(i); + assertThat(original.getFieldName(), equalTo(translated.getFieldName())); + + assertThat(translated.order(), equalTo(original.order())); + assertThat(translated.missing(), equalTo(original.missing())); + assertThat(translated.unmappedType(), equalTo(original.unmappedType())); + assertThat(translated.getNumericType(), equalTo(original.getNumericType())); + assertThat(translated.getFormat(), equalTo(original.getFormat())); + assertThat(translated.sortMode(), equalTo(original.sortMode())); + }); + } + + public void testNestedSortingIsNotAllowed() { + final FieldSortBuilder fieldSortBuilder = new FieldSortBuilder("roles"); + fieldSortBuilder.setNestedSort(new NestedSortBuilder("something")); + final 
IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportQueryUserAction.translateFieldSortBuilders(List.of(fieldSortBuilder), SearchSourceBuilder.searchSource()) + ); + assertThat(e.getMessage(), equalTo("nested sorting is not supported for User query")); + } + + public void testNestedSortingOnTextFieldsNotAllowed() { + String fieldName = randomFrom("full_name", "email"); + final List fieldNames = List.of(fieldName); + final List originals = fieldNames.stream().map(this::randomFieldSortBuilderWithName).toList(); + final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); + + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportQueryUserAction.translateFieldSortBuilders(originals, searchSourceBuilder) + ); + assertThat(e.getMessage(), equalTo(String.format(Locale.ROOT, "sorting is not supported for field [%s] in User query", fieldName))); + } + + private FieldSortBuilder randomFieldSortBuilderWithName(String name) { + final FieldSortBuilder fieldSortBuilder = new FieldSortBuilder(name); + fieldSortBuilder.order(randomBoolean() ? SortOrder.ASC : SortOrder.DESC); + fieldSortBuilder.setFormat(randomBoolean() ? 
randomAlphaOfLengthBetween(3, 16) : null); + if (randomBoolean()) { + fieldSortBuilder.setNumericType(randomFrom("long", "double", "date", "date_nanos")); + } + if (randomBoolean()) { + fieldSortBuilder.missing(randomAlphaOfLengthBetween(3, 8)); + } + if (randomBoolean()) { + fieldSortBuilder.sortMode(randomFrom(SortMode.values())); + } + return fieldSortBuilder; + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 9c48354b951d8..2438e625259d1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -627,21 +627,23 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException CapturingLogger.output(logger.getName(), Level.INFO).clear(); final String keyId = randomAlphaOfLength(10); + final TimeValue newExpiration = randomFrom(ApiKeyTests.randomFutureExpirationTime(), null); final var updateApiKeyRequest = new UpdateApiKeyRequest( keyId, randomBoolean() ? null : keyRoleDescriptors, metadataWithSerialization.metadata(), - ApiKeyTests.randomFutureExpirationTime() + newExpiration ); auditTrail.accessGranted(requestId, authentication, UpdateApiKeyAction.NAME, updateApiKeyRequest, authorizationInfo); final var expectedUpdateKeyAuditEventString = String.format( Locale.ROOT, """ - "change":{"apikey":{"id":"%s","type":"rest"%s%s}}\ + "change":{"apikey":{"id":"%s","type":"rest"%s%s,"expiration":%s}}\ """, keyId, updateApiKeyRequest.getRoleDescriptors() == null ? "" : "," + roleDescriptorsStringBuilder, - updateApiKeyRequest.getMetadata() == null ? 
"" : Strings.format(",\"metadata\":%s", metadataWithSerialization.serialization()) + updateApiKeyRequest.getMetadata() == null ? "" : Strings.format(",\"metadata\":%s", metadataWithSerialization.serialization()), + updateApiKeyRequest.getExpiration() == null ? null : Strings.format("\"%s\"", newExpiration) ); output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); @@ -664,13 +666,13 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException keyIds, randomBoolean() ? null : keyRoleDescriptors, metadataWithSerialization.metadata(), - ApiKeyTests.randomFutureExpirationTime() + null ); auditTrail.accessGranted(requestId, authentication, BulkUpdateApiKeyAction.NAME, bulkUpdateApiKeyRequest, authorizationInfo); final var expectedBulkUpdateKeyAuditEventString = String.format( Locale.ROOT, """ - "change":{"apikeys":{"ids":[%s],"type":"rest"%s%s}}\ + "change":{"apikeys":{"ids":[%s],"type":"rest"%s%s,"expiration":null}}\ """, bulkUpdateApiKeyRequest.getIds().stream().map(s -> Strings.format("\"%s\"", s)).collect(Collectors.joining(",")), bulkUpdateApiKeyRequest.getRoleDescriptors() == null ? 
"" : "," + roleDescriptorsStringBuilder, @@ -875,22 +877,24 @@ public void testSecurityConfigChangeEventForCrossClusterApiKeys() throws IOExcep updateMetadataWithSerialization = randomApiKeyMetadataWithSerialization(); } + final TimeValue newExpiration = randomFrom(ApiKeyTests.randomFutureExpirationTime(), null); final var updateRequest = new UpdateCrossClusterApiKeyRequest( createRequest.getId(), updateAccess, updateMetadataWithSerialization.metadata(), - ApiKeyTests.randomFutureExpirationTime() + newExpiration ); auditTrail.accessGranted(requestId, authentication, UpdateCrossClusterApiKeyAction.NAME, updateRequest, authorizationInfo); final String expectedUpdateAuditEventString = String.format( Locale.ROOT, """ - "change":{"apikey":{"id":"%s","type":"cross_cluster"%s%s}}\ + "change":{"apikey":{"id":"%s","type":"cross_cluster"%s%s,"expiration":%s}}\ """, createRequest.getId(), updateAccess == null ? "" : ",\"role_descriptors\":" + accessWithSerialization.serialization(), - updateRequest.getMetadata() == null ? "" : Strings.format(",\"metadata\":%s", updateMetadataWithSerialization.serialization()) + updateRequest.getMetadata() == null ? "" : Strings.format(",\"metadata\":%s", updateMetadataWithSerialization.serialization()), + newExpiration == null ? 
null : String.format(Locale.ROOT, "\"%s\"", newExpiration) ); output = CapturingLogger.output(logger.getName(), Level.INFO); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index b921fef9fd917..ac11dee8d4a48 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -405,7 +405,7 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(new SearchRequestBuilder(client)); doAnswer(invocation -> { final var listener = (ActionListener) invocation.getArguments()[1]; - final var searchHit = new SearchHit(docId, apiKeyId); + final var searchHit = SearchHit.unpooled(docId, apiKeyId); try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.map(buildApiKeySourceDoc("some_hash".toCharArray())); searchHit.sourceRef(BytesReference.bytes(builder)); @@ -413,7 +413,7 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits( + SearchHits.unpooled( new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), randomFloat(), @@ -758,7 +758,7 @@ public void testCrossClusterApiKeyUsageStats() { ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits( + SearchHits.unpooled( searchHits.toArray(SearchHit[]::new), new TotalHits(searchHits.size(), TotalHits.Relation.EQUAL_TO), randomFloat(), @@ -825,7 +825,7 @@ private SearchHit searchHitForCrossClusterApiKey(int crossClusterAccessLevel) { }; final int docId = randomIntBetween(0, Integer.MAX_VALUE); final String apiKeyId = randomAlphaOfLength(20); - final var searchHit = 
new SearchHit(docId, apiKeyId); + final var searchHit = SearchHit.unpooled(docId, apiKeyId); try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.map(XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.format(""" { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index c524847e9dbbb..3c6f7462c0bb4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -265,6 +265,7 @@ public void init() throws Exception { client = mock(Client.class); threadPool = new ThreadPool( settings, + MeterRegistry.NOOP, new FixedExecutorBuilder( settings, THREAD_POOL_NAME, @@ -575,7 +576,8 @@ public void testAuthenticateSmartRealmOrdering() { }, this::logAndFail)); verify(auditTrail).authenticationFailed(reqId.get(), firstRealm.name(), token, "_action", transportRequest); - verify(firstRealm, times(2)).name(); // used above one time + verify(firstRealm, times(4)).name(); // used above one time plus two times for authc result and time metrics + verify(firstRealm, times(2)).type(); // used two times to collect authc result and time metrics verify(secondRealm, times(2)).realmRef(); // also used in license tracking verify(firstRealm, times(2)).token(threadContext); verify(secondRealm, times(2)).token(threadContext); @@ -583,6 +585,8 @@ public void testAuthenticateSmartRealmOrdering() { verify(secondRealm, times(2)).supports(token); verify(firstRealm).authenticate(eq(token), anyActionListener()); verify(secondRealm, times(2)).authenticate(eq(token), anyActionListener()); + verify(secondRealm, times(4)).name(); // called two times for every authenticate call to collect authc result and time metrics + 
verify(secondRealm, times(4)).type(); // called two times for every authenticate call to collect authc result and time metrics verifyNoMoreInteractions(auditTrail, firstRealm, secondRealm); // Now assume some change in the backend system so that 2nd realm no longer has the user, but the 1st realm does. @@ -711,7 +715,8 @@ public void testAuthenticateSmartRealmOrderingDisabled() { verify(operatorPrivilegesService).maybeMarkOperatorUser(eq(result), eq(threadContext)); }, this::logAndFail)); verify(auditTrail, times(2)).authenticationFailed(reqId.get(), firstRealm.name(), token, "_action", transportRequest); - verify(firstRealm, times(3)).name(); // used above one time + verify(firstRealm, times(7)).name(); // used above one time plus two times for every call to collect success and time metrics + verify(firstRealm, times(4)).type(); // used two times for every call to collect authc result and time metrics verify(secondRealm, times(2)).realmRef(); verify(firstRealm, times(2)).token(threadContext); verify(secondRealm, times(2)).token(threadContext); @@ -719,6 +724,8 @@ public void testAuthenticateSmartRealmOrderingDisabled() { verify(secondRealm, times(2)).supports(token); verify(firstRealm, times(2)).authenticate(eq(token), anyActionListener()); verify(secondRealm, times(2)).authenticate(eq(token), anyActionListener()); + verify(secondRealm, times(4)).name(); // called two times for every authenticate call to collect authc result and time metrics + verify(secondRealm, times(4)).type(); // called two times for every authenticate call to collect authc result and time metrics verifyNoMoreInteractions(auditTrail, firstRealm, secondRealm); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticatorTests.java new file mode 100644 index 0000000000000..e977c32565893 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/OAuth2TokenAuthenticatorTests.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.authc; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; +import org.elasticsearch.xpack.core.security.authc.support.BearerToken; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; + +import java.time.Clock; +import java.time.Instant; +import java.util.Map; + +import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OAuth2TokenAuthenticatorTests extends AbstractAuthenticatorTests { + + public void testRecordingSuccessfulAuthenticationMetrics() { + final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + final long 
initialNanoTime = randomLongBetween(0, 100); + final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime); + final TokenService tokenService = mock(TokenService.class); + final OAuth2TokenAuthenticator oauth2Authenticator = new OAuth2TokenAuthenticator( + tokenService, + telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(), + nanoTimeSupplier + ); + + final BearerToken bearerToken = randomBearerToken(); + final Authenticator.Context context = mockAuthenticatorContext(bearerToken); + + final long executionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocation -> { + nanoTimeSupplier.advanceTime(executionTimeInNanos); + final ActionListener listener = invocation.getArgument(1); + final Authentication authentication = AuthenticationTestHelper.builder() + .user(AuthenticationTestHelper.randomUser()) + .realmRef(AuthenticationTestHelper.randomRealmRef()) + .build(false); + final int seconds = randomIntBetween(0, Math.toIntExact(TimeValue.timeValueMinutes(30L).getSeconds())); + final Instant expirationTime = Clock.systemUTC().instant().plusSeconds(seconds); + final UserToken userToken = new UserToken(authentication, expirationTime); + listener.onResponse(userToken); + return Void.TYPE; + }).when(tokenService).tryAuthenticateToken(any(SecureString.class), anyActionListener()); + + final PlainActionFuture> future = new PlainActionFuture<>(); + oauth2Authenticator.authenticate(context, future); + var authResult = future.actionGet(); + assertThat(authResult.isAuthenticated(), equalTo(true)); + + // verify we recorded success metric + assertSingleSuccessAuthMetric(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN, Map.of()); + + // verify that there were no failures recorded + assertZeroFailedAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN); + + // verify we recorded authentication time + assertAuthenticationTimeMetric(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN, executionTimeInNanos, 
Map.of()); + } + + public void testRecordingFailedAuthenticationMetrics() { + final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + final long initialNanoTime = randomLongBetween(0, 100); + final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime); + final TokenService tokenService = mock(TokenService.class); + final OAuth2TokenAuthenticator oauth2Authenticator = new OAuth2TokenAuthenticator( + tokenService, + telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(), + nanoTimeSupplier + ); + + final BearerToken bearerToken = randomBearerToken(); + final Authenticator.Context context = mockAuthenticatorContext(bearerToken); + + var failureError = new ElasticsearchSecurityException("failed to authenticate OAuth2 token", RestStatus.UNAUTHORIZED); + when(context.getRequest().exceptionProcessingRequest(same(failureError), any())).thenReturn(failureError); + + final long executionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocation -> { + nanoTimeSupplier.advanceTime(executionTimeInNanos); + final ActionListener listener = invocation.getArgument(1); + listener.onFailure(failureError); + return Void.TYPE; + }).when(tokenService).tryAuthenticateToken(any(SecureString.class), anyActionListener()); + + final PlainActionFuture> future = new PlainActionFuture<>(); + oauth2Authenticator.authenticate(context, future); + var e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e, sameInstance(failureError)); + + // verify we recorded failure metric + assertSingleFailedAuthMetric( + telemetryPlugin, + SecurityMetricType.AUTHC_OAUTH2_TOKEN, + Map.ofEntries(Map.entry(OAuth2TokenAuthenticator.ATTRIBUTE_AUTHC_FAILURE_REASON, "failed to authenticate OAuth2 token")) + ); + + // verify that there were no successes recorded + assertZeroSuccessAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN); + + // verify we recorded authentication time + 
assertAuthenticationTimeMetric(telemetryPlugin, SecurityMetricType.AUTHC_OAUTH2_TOKEN, executionTimeInNanos, Map.of()); + } + + private static BearerToken randomBearerToken() { + return new BearerToken(new SecureString(randomAlphaOfLengthBetween(5, 10).toCharArray())); + } + + private Authenticator.Context mockAuthenticatorContext(BearerToken token) { + final Authenticator.Context context = mock(Authenticator.Context.class); + when(context.getMostRecentAuthenticationToken()).thenReturn(token); + when(context.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class); + when(context.getRequest()).thenReturn(auditableRequest); + return context; + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java index 0309404fee84d..b62fc4ab6b04d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; @@ -28,24 +28,28 @@ import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.RealmDomain; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; 
import org.junit.Before; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class RealmsAuthenticatorTests extends ESTestCase { +public class RealmsAuthenticatorTests extends AbstractAuthenticatorTests { private ThreadContext threadContext; private Realms realms; @@ -63,6 +67,8 @@ public class RealmsAuthenticatorTests extends ESTestCase { private Cache lastSuccessfulAuthCache; private String nodeName; private RealmsAuthenticator realmsAuthenticator; + private TestTelemetryPlugin telemetryPlugin; + private TestNanoTimeSupplier nanoTimeSupplier; @SuppressWarnings("unchecked") @Before @@ -101,7 +107,14 @@ public void init() throws Exception { numInvalidation = new AtomicLong(); lastSuccessfulAuthCache = mock(Cache.class); - realmsAuthenticator = new RealmsAuthenticator(numInvalidation, lastSuccessfulAuthCache); + telemetryPlugin = new TestTelemetryPlugin(); + nanoTimeSupplier = new TestNanoTimeSupplier(randomLongBetween(0, 100)); + realmsAuthenticator = new RealmsAuthenticator( + numInvalidation, + lastSuccessfulAuthCache, + telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(), + nanoTimeSupplier + ); } public void testExtractCredentials() { @@ -258,6 +271,107 @@ public void testEmptyRunAsUsernameWillFail() { assertThat(expectThrows(ElasticsearchSecurityException.class, future::actionGet), is(e)); } + public void 
testRecodingSuccessfulAuthenticationMetrics() { + when(lastSuccessfulAuthCache.get(username)).thenReturn(randomFrom(realm1, realm2, null)); + final Realm successfulRealm = randomFrom(realm1, realm2); + when(successfulRealm.supports(authenticationToken)).thenReturn(true); + final long successfulExecutionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocationOnMock -> { + nanoTimeSupplier.advanceTime(successfulExecutionTimeInNanos); + @SuppressWarnings("unchecked") + final ActionListener> listener = (ActionListener>) invocationOnMock + .getArguments()[1]; + listener.onResponse(AuthenticationResult.success(user)); + return null; + }).when(successfulRealm).authenticate(eq(authenticationToken), any()); + + final Authenticator.Context context = createAuthenticatorContext(); + context.addAuthenticationToken(authenticationToken); + + final PlainActionFuture> future = new PlainActionFuture<>(); + realmsAuthenticator.authenticate(context, future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + + assertSingleSuccessAuthMetric( + telemetryPlugin, + SecurityMetricType.AUTHC_REALMS, + Map.ofEntries( + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_NAME, successfulRealm.name()), + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_TYPE, successfulRealm.type()) + ) + ); + + assertZeroFailedAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_REALMS); + + assertAuthenticationTimeMetric( + telemetryPlugin, + SecurityMetricType.AUTHC_REALMS, + successfulExecutionTimeInNanos, + Map.ofEntries( + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_NAME, successfulRealm.name()), + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_TYPE, successfulRealm.type()) + ) + ); + } + + public void testRecordingFailedAuthenticationMetric() { + when(lastSuccessfulAuthCache.get(username)).thenReturn(randomFrom(realm1, realm2, null)); + + final Realm unsuccessfulRealm; + if (randomBoolean()) { + 
when(realm1.supports(authenticationToken)).thenReturn(false); + unsuccessfulRealm = realm2; + } else { + when(realm2.supports(authenticationToken)).thenReturn(false); + unsuccessfulRealm = realm1; + } + + when(unsuccessfulRealm.supports(authenticationToken)).thenReturn(true); + final long unsuccessfulExecutionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocationOnMock -> { + nanoTimeSupplier.advanceTime(unsuccessfulExecutionTimeInNanos); + @SuppressWarnings("unchecked") + final ActionListener> listener = (ActionListener>) invocationOnMock + .getArguments()[1]; + listener.onResponse(AuthenticationResult.unsuccessful("unsuccessful realms authentication", null)); + return null; + }).when(unsuccessfulRealm).authenticate(eq(authenticationToken), any()); + + final Authenticator.Context context = createAuthenticatorContext(); + final ElasticsearchSecurityException exception = new ElasticsearchSecurityException("realms authentication failed"); + when(request.authenticationFailed(same(authenticationToken))).thenReturn(exception); + context.addAuthenticationToken(authenticationToken); + + final PlainActionFuture> future = new PlainActionFuture<>(); + realmsAuthenticator.authenticate(context, future); + var e = expectThrows(ElasticsearchSecurityException.class, () -> future.actionGet()); + assertThat(e, sameInstance(exception)); + + assertSingleFailedAuthMetric( + telemetryPlugin, + SecurityMetricType.AUTHC_REALMS, + Map.ofEntries( + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_NAME, unsuccessfulRealm.name()), + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_TYPE, unsuccessfulRealm.type()), + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_AUTHC_FAILURE_REASON, "unsuccessful realms authentication") + ) + ); + + assertZeroSuccessAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_REALMS); + + assertAuthenticationTimeMetric( + telemetryPlugin, + SecurityMetricType.AUTHC_REALMS, + unsuccessfulExecutionTimeInNanos, + Map.ofEntries( + 
Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_NAME, unsuccessfulRealm.name()), + Map.entry(RealmsAuthenticator.ATTRIBUTE_REALM_TYPE, unsuccessfulRealm.type()) + ) + ); + + } + private void configureRealmAuthResponse(Realm realm, AuthenticationResult authenticationResult) { doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 2f646631d14cd..adf0b44266260 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -62,6 +62,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -254,7 +255,7 @@ private static DiscoveryNode addAnotherPre8500DataNode(ClusterService clusterSer transportVersion = TransportVersions.V_8_8_1; } else { version = Version.V_8_9_0; - transportVersion = TransportVersions.V_8_500_020; + transportVersion = TransportVersions.V_8_9_X; } return addAnotherDataNodeWithVersion(clusterService, version, transportVersion); } @@ -269,6 +270,7 @@ public void tearDown() throws Exception { public static void startThreadPool() throws IOException { threadPool = new ThreadPool( settings, + MeterRegistry.NOOP, new FixedExecutorBuilder( settings, TokenService.THREAD_POOL_NAME, @@ -1235,9 +1237,9 @@ private void mockTokenForRefreshToken( assertThat(refreshFilter.fieldName(), is("refresh_token.token")); final SearchHits hits; if (storedRefreshToken.equals(refreshFilter.value())) { - SearchHit hit = new 
SearchHit(randomInt(), "token_" + userToken.getId()); + SearchHit hit = SearchHit.unpooled(randomInt(), "token_" + userToken.getId()); hit.sourceRef(docSource); - hits = new SearchHits(new SearchHit[] { hit }, null, 1); + hits = SearchHits.unpooled(new SearchHit[] { hit }, null, 1); } else { hits = SearchHits.EMPTY_WITH_TOTAL_HITS; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtTokenExtractionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtTokenExtractionTests.java index 8662561aca1ae..9d2e8228aedde 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtTokenExtractionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtTokenExtractionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authc.Realm; @@ -54,7 +55,11 @@ public void testRealmLetsThroughInvalidJWTs() { Realms realms = mock(Realms.class); // mock realm sits in-between when(realms.getActiveRealms()).thenReturn(List.of(jwtRealm1, mockRealm, jwtRealm2)); - RealmsAuthenticator realmsAuthenticator = new RealmsAuthenticator(mock(AtomicLong.class), (Cache) mock(Cache.class)); + RealmsAuthenticator realmsAuthenticator = new RealmsAuthenticator( + mock(AtomicLong.class), + (Cache) mock(Cache.class), + MeterRegistry.NOOP + ); final Authenticator.Context context = new Authenticator.Context( threadContext, mock(AuthenticationService.AuditableRequest.class), diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index ecef71f1c4a68..61646f5ff375b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -344,6 +344,11 @@ public void testElasticEnterpriseSearchServerAccount() { assertThat(role.cluster().check(GetLifecycleAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(ILMActions.PUT.name(), request, authentication), is(true)); + // Connector secrets. Enterprise Search has read and write access. + assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/delete", request, authentication), is(true)); + assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/get", request, authentication), is(true)); + assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/post", request, authentication), is(true)); + List.of( "search-" + randomAlphaOfLengthBetween(1, 20), ".search-acl-filter-" + randomAlphaOfLengthBetween(1, 20), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java index 3a9fee4288bf2..33d3e6783b9e6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java @@ -263,7 +263,7 @@ public void testFindTokensFor() { if (r instanceof SearchRequest) { final SearchHit[] 
hits = IntStream.range(0, nhits) .mapToObj( - i -> new SearchHit( + i -> SearchHit.unpooled( randomIntBetween(0, Integer.MAX_VALUE), SERVICE_ACCOUNT_TOKEN_DOC_TYPE + "-" + accountId.asPrincipal() + "/" + tokenNames[i] ) @@ -272,7 +272,7 @@ public void testFindTokensFor() { ActionListener.respondAndRelease( l, new SearchResponse( - new SearchHits(hits, new TotalHits(nhits, TotalHits.Relation.EQUAL_TO), randomFloat(), null, null, null), + SearchHits.unpooled(hits, new TotalHits(nhits, TotalHits.Relation.EQUAL_TO), randomFloat(), null, null, null), null, null, false, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 169275ccc3ee3..a0008ba632151 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -346,7 +346,7 @@ private void doAnswerWithSearchResult(Client client, ExpressionRoleMapping mappi doAnswer(invocation -> { @SuppressWarnings("unchecked") final var listener = (ActionListener) invocation.getArguments()[1]; - final var searchHit = new SearchHit( + final var searchHit = SearchHit.unpooled( randomIntBetween(0, Integer.MAX_VALUE), NativeRoleMappingStore.getIdForName(mapping.getName()) ); @@ -357,14 +357,7 @@ private void doAnswerWithSearchResult(Client client, ExpressionRoleMapping mappi ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits( - new SearchHit[] { searchHit }, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), - randomFloat(), - null, - null, - null - ), + SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), randomFloat()), null, null, false, diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 0c2f9cefbcffb..ed1b5e6c7668b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -818,22 +818,12 @@ private SearchHit[] buildHits(List sourcePrivile } private static SearchResponse buildSearchResponse(SearchHit[] hits) { - return new SearchResponse( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, - null, - false, - false, - null, - 1, - "_scrollId1", - 1, - 1, - 0, - 1, - null, - null - ); + var searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f); + try { + return new SearchResponse(searchHits.asUnpooled(), null, null, false, false, null, 1, "_scrollId1", 1, 1, 0, 1, null, null); + } finally { + searchHits.decRef(); + } } private void handleBulkRequest(int expectedCount, Predicate> isCreated) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java index 2abbb6a610170..3a4e5a404eace 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.http.HttpInfo; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.node.Node; +import org.elasticsearch.telemetry.metric.MeterRegistry; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -84,6 +85,7 @@ public static void startThreadPool() throws IOException { final Settings settings = Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "InternalEnrollmentTokenGeneratorTests").build(); threadPool = new ThreadPool( settings, + MeterRegistry.NOOP, new FixedExecutorBuilder( settings, TokenService.THREAD_POOL_NAME, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java index d3b46f5847636..0b0f8f8ddaae5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -23,10 +24,9 @@ import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequestFilter; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.TestMatchers; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xcontent.DeprecationHandler; @@ -48,7 +48,6 @@ import 
org.elasticsearch.xpack.security.authz.restriction.WorkflowServiceTests.TestBaseRestHandler; import org.elasticsearch.xpack.security.operator.OperatorPrivileges; import org.junit.Before; -import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import java.util.Base64; @@ -72,8 +71,6 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; @@ -97,14 +94,7 @@ public void init() throws Exception { } private SecurityRestFilter getFilter(OperatorPrivileges.OperatorPrivilegesService privilegesService) { - return new SecurityRestFilter( - true, - threadContext, - secondaryAuthenticator, - new AuditTrailService(null, null), - restHandler, - privilegesService - ); + return new SecurityRestFilter(true, threadContext, secondaryAuthenticator, new AuditTrailService(null, null), privilegesService); } public void testProcess() throws Exception { @@ -119,8 +109,9 @@ public void testProcess() throws Exception { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq(httpRequest), anyActionListener()); - filter.handleRequest(request, channel, null); - verify(restHandler).handleRequest(request, channel, null); + PlainActionFuture future = new PlainActionFuture<>(); + filter.intercept(request, channel, restHandler, future); + assertThat(future.get(), is(Boolean.TRUE)); verifyNoMoreInteractions(channel); } @@ -150,19 +141,21 @@ public void testProcessSecondaryAuthentication() throws Exception { }).when(authcService).authenticate(eq(httpRequest), eq(false), anyActionListener()); SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); - AtomicReference secondaryAuthRef = new AtomicReference<>(); - doAnswer(i -> { - 
secondaryAuthRef.set(securityContext.getSecondaryAuthentication()); - return null; - }).when(restHandler).handleRequest(request, channel, null); final String credentials = randomAlphaOfLengthBetween(4, 8) + ":" + randomAlphaOfLengthBetween(4, 12); threadContext.putHeader( SecondaryAuthenticator.SECONDARY_AUTH_HEADER_NAME, "Basic " + Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharset.UTF_8)) ); - filter.handleRequest(request, channel, null); - verify(restHandler).handleRequest(request, channel, null); + + AtomicReference secondaryAuthRef = new AtomicReference<>(); + ActionListener listener = ActionListener.wrap(proceed -> { + assertThat(proceed, is(Boolean.TRUE)); + secondaryAuthRef.set(securityContext.getSecondaryAuthentication()); + }, ex -> { throw new RuntimeException(ex); }); + + filter.intercept(request, channel, restHandler, listener); + verifyNoMoreInteractions(channel); assertThat(secondaryAuthRef.get(), notNullValue()); @@ -170,11 +163,13 @@ public void testProcessSecondaryAuthentication() throws Exception { } public void testProcessWithSecurityDisabled() throws Exception { - filter = new SecurityRestFilter(false, threadContext, secondaryAuthenticator, mock(AuditTrailService.class), restHandler, null); + filter = new SecurityRestFilter(false, threadContext, secondaryAuthenticator, mock(AuditTrailService.class), null); assertEquals(NOOP_OPERATOR_PRIVILEGES_SERVICE, filter.getOperatorPrivilegesService()); RestRequest request = mock(RestRequest.class); - filter.handleRequest(request, channel, null); - verify(restHandler).handleRequest(request, channel, null); + + PlainActionFuture future = new PlainActionFuture<>(); + filter.intercept(request, channel, restHandler, future); + assertThat(future.get(), is(Boolean.TRUE)); verifyNoMoreInteractions(channel, authcService); } @@ -182,14 +177,14 @@ public void testProcessOptionsMethod() throws Exception { FakeRestRequest request = new 
FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.OPTIONS).build(); when(channel.request()).thenReturn(request); when(channel.newErrorBuilder()).thenReturn(JsonXContent.contentBuilder()); - filter.handleRequest(request, channel, null); + + PlainActionFuture future = new PlainActionFuture<>(); + filter.intercept(request, channel, restHandler, future); + final ElasticsearchSecurityException ex = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(ex, TestMatchers.throwableWithMessage(containsString("Cannot dispatch OPTIONS request, as they are not authenticated"))); + verifyNoMoreInteractions(restHandler); verifyNoMoreInteractions(authcService); - ArgumentCaptor responseArgumentCaptor = ArgumentCaptor.forClass(RestResponse.class); - verify(channel).sendResponse(responseArgumentCaptor.capture()); - RestResponse restResponse = responseArgumentCaptor.getValue(); - assertThat(restResponse.status(), is(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat(restResponse.content().utf8ToString(), containsString("Cannot dispatch OPTIONS request, as they are not authenticated")); } public void testProcessFiltersBodyCorrectly() throws Exception { @@ -198,12 +193,9 @@ public void testProcessFiltersBodyCorrectly() throws Exception { XContentType.JSON ).build(); when(channel.request()).thenReturn(restRequest); - SetOnce handlerRequest = new SetOnce<>(); restHandler = new FilteredRestHandler() { @Override - public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { - handlerRequest.set(request); - } + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) {} @Override public Set getFilteredFields() { @@ -222,28 +214,12 @@ public Set getFilteredFields() { threadContext, secondaryAuthenticator, new AuditTrailService(auditTrail, licenseState), - restHandler, NOOP_OPERATOR_PRIVILEGES_SERVICE ); - filter.handleRequest(restRequest, channel, null); - - 
assertEquals(restRequest, handlerRequest.get()); - assertEquals(restRequest.content(), handlerRequest.get().content()); - Map original; - try ( - var parser = XContentType.JSON.xContent() - .createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - handlerRequest.get().content().streamInput() - ) - ) { - original = parser.map(); - } - assertEquals(2, original.size()); - assertEquals(SecuritySettingsSourceField.TEST_PASSWORD, original.get("password")); - assertEquals("bar", original.get("foo")); + PlainActionFuture future = new PlainActionFuture<>(); + filter.intercept(restRequest, channel, restHandler, future); + assertThat(future.get(), is(Boolean.TRUE)); assertNotEquals(restRequest, auditTrailRequest.get()); assertNotEquals(restRequest.content(), auditTrailRequest.get().content()); @@ -284,7 +260,9 @@ public void testSanitizeHeaders() throws Exception { Set foundKeys = threadContext.getHeaders().keySet(); assertThat(foundKeys, hasItem(UsernamePasswordToken.BASIC_AUTH_HEADER)); - filter.handleRequest(request, channel, null); + PlainActionFuture future = new PlainActionFuture<>(); + filter.intercept(request, channel, restHandler, future); + assertThat(future.get(), is(Boolean.TRUE)); foundKeys = threadContext.getHeaders().keySet(); assertThat(foundKeys, not(hasItem(UsernamePasswordToken.BASIC_AUTH_HEADER))); @@ -296,10 +274,12 @@ public void testProcessWithWorkflow() throws Exception { restHandler = new TestBaseRestHandler(randomFrom(workflow.allowedRestHandlers())); final WorkflowService workflowService = new WorkflowService(); - filter = new SecurityRestFilter(true, threadContext, secondaryAuthenticator, new AuditTrailService(null, null), restHandler, null); + filter = new SecurityRestFilter(true, threadContext, secondaryAuthenticator, new AuditTrailService(null, null), null); RestRequest request = mock(RestRequest.class); - filter.handleRequest(request, channel, null); + PlainActionFuture future = new PlainActionFuture<>(); 
+ filter.intercept(request, channel, restHandler, future); + assertThat(future.get(), is(Boolean.TRUE)); assertThat(WorkflowService.readWorkflowFromThreadContext(threadContext), equalTo(workflow.name())); } @@ -315,10 +295,12 @@ public void testProcessWithoutWorkflow() throws Exception { } final WorkflowService workflowService = new WorkflowService(); - filter = new SecurityRestFilter(true, threadContext, secondaryAuthenticator, new AuditTrailService(null, null), restHandler, null); + filter = new SecurityRestFilter(true, threadContext, secondaryAuthenticator, new AuditTrailService(null, null), null); RestRequest request = mock(RestRequest.class); - filter.handleRequest(request, channel, null); + PlainActionFuture future = new PlainActionFuture<>(); + filter.intercept(request, channel, restHandler, future); + assertThat(future.get(), is(Boolean.TRUE)); assertThat(WorkflowService.readWorkflowFromThreadContext(threadContext), nullValue()); } @@ -354,11 +336,13 @@ public boolean checkRest( public void maybeInterceptRequest(ThreadContext threadContext, TransportRequest request) {} }); - filter.handleRequest(request, channel, null); + PlainActionFuture future = new PlainActionFuture<>(); + filter.intercept(request, channel, restHandler, future); + if (isOperator) { - verify(restHandler).handleRequest(request, channel, null); + assertThat(future.get(), is(Boolean.TRUE)); } else { - verify(restHandler, never()).handleRequest(request, channel, null); + assertThat(future.get(), is(Boolean.FALSE)); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java index 791aba46c92ea..0ab9533e62d4c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.ThreadPool; @@ -54,7 +55,7 @@ public void setUp() throws Exception { .put("node.name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - threadPool = new ThreadPool(settings); + threadPool = new ThreadPool(settings, MeterRegistry.NOOP); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index a1f696cc5dddd..2ee42b360f02a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.ThreadPool; @@ -60,7 +61,7 @@ public void setUp() throws Exception { .put("node.name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - threadPool = new ThreadPool(settings); + threadPool = new ThreadPool(settings, MeterRegistry.NOOP); } @Override diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java index 3c0e24da32763..8bbd051c2fc32 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.ThreadPool; @@ -53,7 +54,7 @@ public void setUp() throws Exception { .put("node.name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - threadPool = new ThreadPool(settings); + threadPool = new ThreadPool(settings, MeterRegistry.NOOP); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java index 67d2ab006eb22..4f14d8414ebca 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; +import 
org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.ThreadPool; @@ -58,7 +59,7 @@ public void setUp() throws Exception { .put("node.name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - threadPool = new ThreadPool(settings); + threadPool = new ThreadPool(settings, MeterRegistry.NOOP); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java new file mode 100644 index 0000000000000..4a593eeb24ac6 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.rest.action.user; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.PrefixQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.AbstractRestChannel; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; + +import java.util.List; + +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.mockito.Mockito.mock; + +public class RestQueryUserActionTests extends ESTestCase { + + private final XPackLicenseState mockLicenseState = mock(XPackLicenseState.class); + + @Override + protected NamedXContentRegistry xContentRegistry() { + final SearchModule searchModule = new SearchModule(Settings.EMPTY, List.of()); + return 
new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + public void testQueryParsing() throws Exception { + final String query1 = """ + { + "query": { + "bool": { + "must": [ + { + "terms": { + "username": [ "bart", "homer" ] + } + } + ], + "should": [ { "prefix": { "username": "ba" } } ] + } + } + }"""; + final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray(query1), + XContentType.JSON + ).build(); + + final SetOnce responseSetOnce = new SetOnce<>(); + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { + @Override + public void sendResponse(RestResponse restResponse) { + responseSetOnce.set(restResponse); + } + }; + + try (var threadPool = createThreadPool()) { + final var client = new NodeClient(Settings.EMPTY, threadPool) { + @SuppressWarnings("unchecked") + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + QueryUserRequest queryUserRequest = (QueryUserRequest) request; + final QueryBuilder queryBuilder = queryUserRequest.getQueryBuilder(); + assertNotNull(queryBuilder); + assertThat(queryBuilder.getClass(), is(BoolQueryBuilder.class)); + final BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder; + assertTrue(boolQueryBuilder.filter().isEmpty()); + assertTrue(boolQueryBuilder.mustNot().isEmpty()); + assertThat(boolQueryBuilder.must(), hasSize(1)); + final QueryBuilder mustQueryBuilder = boolQueryBuilder.must().get(0); + assertThat(mustQueryBuilder.getClass(), is(TermsQueryBuilder.class)); + assertThat(((TermsQueryBuilder) mustQueryBuilder).fieldName(), equalTo("username")); + assertThat(boolQueryBuilder.should(), hasSize(1)); + final QueryBuilder shouldQueryBuilder = boolQueryBuilder.should().get(0); + assertThat(shouldQueryBuilder.getClass(), is(PrefixQueryBuilder.class)); + assertThat(((PrefixQueryBuilder) shouldQueryBuilder).fieldName(), equalTo("username")); + 
listener.onResponse((Response) new QueryUserResponse(0, List.of())); + } + }; + final RestQueryUserAction restQueryUserAction = new RestQueryUserAction(Settings.EMPTY, mockLicenseState); + restQueryUserAction.handleRequest(restRequest, restChannel, client); + } + + assertNotNull(responseSetOnce.get()); + } + + public void testParsingSearchParameters() throws Exception { + final String requestBody = """ + { + "query": { + "match_all": {} + }, + "from": 42, + "size": 20, + "sort": [ "username", "full_name"], + "search_after": [ "bart" ] + }"""; + + final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray(requestBody), + XContentType.JSON + ).build(); + + final SetOnce responseSetOnce = new SetOnce<>(); + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { + @Override + public void sendResponse(RestResponse restResponse) { + responseSetOnce.set(restResponse); + } + }; + + try (var threadPool = createThreadPool()) { + final var client = new NodeClient(Settings.EMPTY, threadPool) { + @SuppressWarnings("unchecked") + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + QueryUserRequest queryUserRequest = (QueryUserRequest) request; + final QueryBuilder queryBuilder = queryUserRequest.getQueryBuilder(); + assertNotNull(queryBuilder); + assertThat(queryBuilder.getClass(), is(MatchAllQueryBuilder.class)); + assertThat(queryUserRequest.getFrom(), equalTo(42)); + assertThat(queryUserRequest.getSize(), equalTo(20)); + final List fieldSortBuilders = queryUserRequest.getFieldSortBuilders(); + assertThat(fieldSortBuilders, hasSize(2)); + + assertThat(fieldSortBuilders.get(0), equalTo(new FieldSortBuilder("username"))); + assertThat(fieldSortBuilders.get(1), equalTo(new FieldSortBuilder("full_name"))); + + final SearchAfterBuilder searchAfterBuilder = queryUserRequest.getSearchAfterBuilder(); + assertThat(searchAfterBuilder, 
equalTo(new SearchAfterBuilder().setSortValues(new String[] { "bart" }))); + + listener.onResponse((Response) new QueryUserResponse(0, List.of())); + } + }; + + final RestQueryUserAction queryUserAction = new RestQueryUserAction(Settings.EMPTY, mockLicenseState); + queryUserAction.handleRequest(restRequest, restChannel, client); + } + assertNotNull(responseSetOnce.get()); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java index 477409f22369f..4064d9f0ce4da 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java @@ -12,12 +12,14 @@ import org.elasticsearch.index.query.DistanceFeatureQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.MultiTermQueryBuilder; import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.SimpleQueryStringBuilder; import org.elasticsearch.index.query.SpanQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; @@ -29,17 +31,20 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationTests; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.user.User; +import 
org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction; import org.elasticsearch.xpack.security.authc.ApiKeyService; import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.List; import java.util.function.Predicate; import static org.elasticsearch.test.LambdaMatchers.falseWith; import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators.FIELD_NAME_TRANSLATORS; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -57,7 +62,9 @@ public class ApiKeyBoolQueryBuilderTests extends ESTestCase { public void testBuildFromSimpleQuery() { final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null; final QueryBuilder q1 = randomSimpleQuery("name"); - final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, authentication); + final List queryFields = new ArrayList<>(); + final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, queryFields::add, authentication); + assertQueryFields(queryFields, q1, authentication); assertCommonFilterQueries(apiKeyQb1, authentication); final List mustQueries = apiKeyQb1.must(); assertThat(mustQueries, hasSize(1)); @@ -69,7 +76,9 @@ public void testBuildFromSimpleQuery() { public void testQueryForDomainAuthentication() { final Authentication authentication = AuthenticationTests.randomAuthentication(null, AuthenticationTests.randomRealmRef(true)); final QueryBuilder query = randomSimpleQuery("name"); - final ApiKeyBoolQueryBuilder apiKeysQuery = ApiKeyBoolQueryBuilder.build(query, authentication); + final List queryFields = new ArrayList<>(); + final ApiKeyBoolQueryBuilder apiKeysQuery = ApiKeyBoolQueryBuilder.build(query, 
queryFields::add, authentication); + assertQueryFields(queryFields, query, authentication); assertThat(apiKeysQuery.filter().get(0), is(QueryBuilders.termQuery("doc_type", "api_key"))); assertThat( apiKeysQuery.filter().get(1), @@ -102,18 +111,23 @@ public void testQueryForDomainAuthentication() { public void testBuildFromBoolQuery() { final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null; + final List queryFields = new ArrayList<>(); final BoolQueryBuilder bq1 = QueryBuilders.boolQuery(); + boolean accessesNameField = false; if (randomBoolean()) { bq1.must(QueryBuilders.prefixQuery("name", "prod-")); + accessesNameField = true; } if (randomBoolean()) { bq1.should(QueryBuilders.wildcardQuery("name", "*-east-*")); + accessesNameField = true; } if (randomBoolean()) { bq1.filter( QueryBuilders.termsQuery("name", randomArray(3, 8, String[]::new, () -> "prod-" + randomInt() + "-east-" + randomInt())) ); + accessesNameField = true; } if (randomBoolean()) { bq1.mustNot(QueryBuilders.idsQuery().addIds(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(22)))); @@ -121,9 +135,18 @@ public void testBuildFromBoolQuery() { if (randomBoolean()) { bq1.minimumShouldMatch(randomIntBetween(1, 2)); } - final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(bq1, authentication); + final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(bq1, queryFields::add, authentication); assertCommonFilterQueries(apiKeyQb1, authentication); + assertThat(queryFields, hasItem("doc_type")); + if (accessesNameField) { + assertThat(queryFields, hasItem("name")); + } + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertThat(apiKeyQb1.must(), hasSize(1)); assertThat(apiKeyQb1.should(), empty()); assertThat(apiKeyQb1.mustNot(), empty()); @@ -141,35 +164,158 @@ public void 
testFieldNameTranslation() { final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null; // metadata - final String metadataKey = randomAlphaOfLengthBetween(3, 8); - final TermQueryBuilder q1 = QueryBuilders.termQuery("metadata." + metadataKey, randomAlphaOfLengthBetween(3, 8)); - final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, authentication); - assertCommonFilterQueries(apiKeyQb1, authentication); - assertThat(apiKeyQb1.must().get(0), equalTo(QueryBuilders.termQuery("metadata_flattened." + metadataKey, q1.value()))); + { + List queryFields = new ArrayList<>(); + final String metadataKey = randomAlphaOfLengthBetween(3, 8); + final TermQueryBuilder q1 = QueryBuilders.termQuery("metadata." + metadataKey, randomAlphaOfLengthBetween(3, 8)); + ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("metadata_flattened." + metadataKey)); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb1, authentication); + assertThat(apiKeyQb1.must().get(0), equalTo(QueryBuilders.termQuery("metadata_flattened." 
+ metadataKey, q1.value()))); + + queryFields = new ArrayList<>(); + String queryStringQuery = randomAlphaOfLength(8); + SimpleQueryStringBuilder q2 = QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("metadata"); + apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q2, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("metadata_flattened")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb1, authentication); + assertThat( + apiKeyQb1.must().get(0), + equalTo(QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("metadata_flattened")) + ); + } // username - final PrefixQueryBuilder q2 = QueryBuilders.prefixQuery("username", randomAlphaOfLength(3)); - final ApiKeyBoolQueryBuilder apiKeyQb2 = ApiKeyBoolQueryBuilder.build(q2, authentication); - assertCommonFilterQueries(apiKeyQb2, authentication); - assertThat(apiKeyQb2.must().get(0), equalTo(QueryBuilders.prefixQuery("creator.principal", q2.value()))); + { + final List queryFields = new ArrayList<>(); + final PrefixQueryBuilder q2 = QueryBuilders.prefixQuery("username", randomAlphaOfLength(3)); + final ApiKeyBoolQueryBuilder apiKeyQb2 = ApiKeyBoolQueryBuilder.build(q2, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("creator.principal")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb2, authentication); + assertThat(apiKeyQb2.must().get(0), equalTo(QueryBuilders.prefixQuery("creator.principal", q2.value()))); + } // realm name - final WildcardQueryBuilder q3 = QueryBuilders.wildcardQuery("realm_name", "*" + randomAlphaOfLength(3)); - final ApiKeyBoolQueryBuilder apiKeyQb3 = 
ApiKeyBoolQueryBuilder.build(q3, authentication); - assertCommonFilterQueries(apiKeyQb3, authentication); - assertThat(apiKeyQb3.must().get(0), equalTo(QueryBuilders.wildcardQuery("creator.realm", q3.value()))); + { + final List queryFields = new ArrayList<>(); + final WildcardQueryBuilder q3 = QueryBuilders.wildcardQuery("realm_name", "*" + randomAlphaOfLength(3)); + final ApiKeyBoolQueryBuilder apiKeyQb3 = ApiKeyBoolQueryBuilder.build(q3, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("creator.realm")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + } + assertCommonFilterQueries(apiKeyQb3, authentication); + assertThat(apiKeyQb3.must().get(0), equalTo(QueryBuilders.wildcardQuery("creator.realm", q3.value()))); + } // creation_time - final TermQueryBuilder q4 = QueryBuilders.termQuery("creation", randomLongBetween(0, Long.MAX_VALUE)); - final ApiKeyBoolQueryBuilder apiKeyQb4 = ApiKeyBoolQueryBuilder.build(q4, authentication); - assertCommonFilterQueries(apiKeyQb4, authentication); - assertThat(apiKeyQb4.must().get(0), equalTo(QueryBuilders.termQuery("creation_time", q4.value()))); + { + final List queryFields = new ArrayList<>(); + final TermQueryBuilder q4 = QueryBuilders.termQuery("creation", randomLongBetween(0, Long.MAX_VALUE)); + final ApiKeyBoolQueryBuilder apiKeyQb4 = ApiKeyBoolQueryBuilder.build(q4, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("creation_time")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb4, authentication); + assertThat(apiKeyQb4.must().get(0), equalTo(QueryBuilders.termQuery("creation_time", q4.value()))); + } // expiration_time - final 
TermQueryBuilder q5 = QueryBuilders.termQuery("expiration", randomLongBetween(0, Long.MAX_VALUE)); - final ApiKeyBoolQueryBuilder apiKeyQb5 = ApiKeyBoolQueryBuilder.build(q5, authentication); - assertCommonFilterQueries(apiKeyQb5, authentication); - assertThat(apiKeyQb5.must().get(0), equalTo(QueryBuilders.termQuery("expiration_time", q5.value()))); + { + final List queryFields = new ArrayList<>(); + final TermQueryBuilder q5 = QueryBuilders.termQuery("expiration", randomLongBetween(0, Long.MAX_VALUE)); + final ApiKeyBoolQueryBuilder apiKeyQb5 = ApiKeyBoolQueryBuilder.build(q5, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("expiration_time")); + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb5, authentication); + assertThat(apiKeyQb5.must().get(0), equalTo(QueryBuilders.termQuery("expiration_time", q5.value()))); + } + + // type + { + final List queryFields = new ArrayList<>(); + float fieldBoost = randomFloat(); + final SimpleQueryStringBuilder q5 = QueryBuilders.simpleQueryStringQuery("q=42").field("type", fieldBoost); + final ApiKeyBoolQueryBuilder apiKeyQb5 = ApiKeyBoolQueryBuilder.build(q5, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("runtime_key_type")); // "type" translation + if (authentication != null && authentication.isApiKey() == false) { + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + } + assertCommonFilterQueries(apiKeyQb5, authentication); + assertThat( + apiKeyQb5.must().get(0), + equalTo(QueryBuilders.simpleQueryStringQuery("q=42").field("runtime_key_type", fieldBoost)) + ); + } + + // test them all together + { + final List queryFields = new ArrayList<>(); + final 
SimpleQueryStringBuilder q6 = QueryBuilders.simpleQueryStringQuery("+OK -NOK maybe~3") + .field("username") + .field("realm_name") + .field("name") + .field("type") + .field("creation") + .field("expiration") + .field("invalidated") + .field("invalidation") + .field("metadata") + .field("metadata.inner"); + final ApiKeyBoolQueryBuilder apiKeyQb6 = ApiKeyBoolQueryBuilder.build(q6, queryFields::add, authentication); + assertThat(queryFields, hasItem("doc_type")); + assertThat(queryFields, hasItem("creator.principal")); + assertThat(queryFields, hasItem("creator.realm")); + assertThat(queryFields, hasItem("name")); + assertThat(queryFields, hasItem("runtime_key_type")); // "type" translation + assertThat(queryFields, hasItem("creation_time")); + assertThat(queryFields, hasItem("expiration_time")); + assertThat(queryFields, hasItem("api_key_invalidated")); + assertThat(queryFields, hasItem("invalidation_time")); + assertThat(queryFields, hasItem("metadata_flattened")); + assertThat(queryFields, hasItem("metadata_flattened.inner")); + assertCommonFilterQueries(apiKeyQb6, authentication); + assertThat( + apiKeyQb6.must().get(0), + equalTo( + QueryBuilders.simpleQueryStringQuery("+OK -NOK maybe~3") + .field("creator.principal") + .field("creator.realm") + .field("name") + .field("runtime_key_type") + .field("creation_time") + .field("expiration_time") + .field("api_key_invalidated") + .field("invalidation_time") + .field("metadata_flattened") + .field("metadata_flattened.inner") + ) + ); + } } public void testAllowListOfFieldNames() { @@ -191,16 +337,48 @@ public void testAllowListOfFieldNames() { "creator.metadata" ); - final QueryBuilder q1 = randomValueOtherThanMany( - q -> q.getClass() == IdsQueryBuilder.class || q.getClass() == MatchAllQueryBuilder.class, - () -> randomSimpleQuery(fieldName) - ); - final IllegalArgumentException e1 = expectThrows( - IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) - ); + { + final QueryBuilder 
q1 = randomValueOtherThanMany( + q -> q.getClass() == IdsQueryBuilder.class + || q.getClass() == MatchAllQueryBuilder.class + || q.getClass() == SimpleQueryStringBuilder.class, + () -> randomSimpleQuery(fieldName) + ); + final IllegalArgumentException e1 = expectThrows( + IllegalArgumentException.class, + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) + ); + assertThat(e1.getMessage(), containsString("Field [" + fieldName + "] is not allowed for API Key query")); + } - assertThat(e1.getMessage(), containsString("Field [" + fieldName + "] is not allowed for API Key query")); + // also wrapped in a boolean query + { + final QueryBuilder q1 = randomValueOtherThanMany( + q -> q.getClass() == IdsQueryBuilder.class + || q.getClass() == MatchAllQueryBuilder.class + || q.getClass() == SimpleQueryStringBuilder.class, + () -> randomSimpleQuery(fieldName) + ); + final BoolQueryBuilder q2 = QueryBuilders.boolQuery(); + if (randomBoolean()) { + if (randomBoolean()) { + q2.filter(q1); + } else { + q2.must(q1); + } + } else { + if (randomBoolean()) { + q2.should(q1); + } else { + q2.mustNot(q1); + } + } + IllegalArgumentException e2 = expectThrows( + IllegalArgumentException.class, + () -> ApiKeyBoolQueryBuilder.build(q2, ignored -> {}, authentication) + ); + assertThat(e2.getMessage(), containsString("Field [" + fieldName + "] is not allowed for API Key query")); + } } public void testTermsLookupIsNotAllowed() { @@ -208,7 +386,7 @@ public void testTermsLookupIsNotAllowed() { final TermsQueryBuilder q1 = QueryBuilders.termsLookupQuery("name", new TermsLookup("lookup", "1", "names")); final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) ); assertThat(e1.getMessage(), containsString("terms query with terms lookup is not supported for API Key query")); } @@ -218,7 +396,7 @@ public void 
testRangeQueryWithRelationIsNotAllowed() { final RangeQueryBuilder q1 = QueryBuilders.rangeQuery("creation").relation("contains"); final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) ); assertThat(e1.getMessage(), containsString("range query with relation is not supported for API Key query")); } @@ -231,7 +409,6 @@ public void testDisallowedQueryTypes() { QueryBuilders.constantScoreQuery(mock(QueryBuilder.class)), QueryBuilders.boostingQuery(mock(QueryBuilder.class), mock(QueryBuilder.class)), QueryBuilders.queryStringQuery("q=a:42"), - QueryBuilders.simpleQueryStringQuery(randomAlphaOfLength(5)), QueryBuilders.combinedFieldsQuery(randomAlphaOfLength(5)), QueryBuilders.disMaxQuery(), QueryBuilders.distanceFeatureQuery( @@ -266,14 +443,38 @@ public void testDisallowedQueryTypes() { final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, - () -> ApiKeyBoolQueryBuilder.build(q1, authentication) + () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication) ); assertThat(e1.getMessage(), containsString("Query type [" + q1.getName() + "] is not supported for API Key query")); + + // also wrapped in a boolean query + { + final BoolQueryBuilder q2 = QueryBuilders.boolQuery(); + if (randomBoolean()) { + if (randomBoolean()) { + q2.filter(q1); + } else { + q2.must(q1); + } + } else { + if (randomBoolean()) { + q2.should(q1); + } else { + q2.mustNot(q1); + } + } + IllegalArgumentException e2 = expectThrows( + IllegalArgumentException.class, + () -> ApiKeyBoolQueryBuilder.build(q2, ignored -> {}, authentication) + ); + assertThat(e2.getMessage(), containsString("Query type [" + q1.getName() + "] is not supported for API Key query")); + } } public void testWillSetAllowedFields() throws IOException { final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build( 
randomSimpleQuery("name"), + ignored -> {}, randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null ); @@ -305,20 +506,243 @@ public void testWillFilterForApiKeyId() { new User(randomAlphaOfLengthBetween(5, 8)), apiKeyId ); - final ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(randomFrom(randomSimpleQuery("name"), null), authentication); + final ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build( + randomFrom(randomSimpleQuery("name"), null), + ignored -> {}, + authentication + ); assertThat(apiKeyQb.filter(), hasItem(QueryBuilders.termQuery("doc_type", "api_key"))); assertThat(apiKeyQb.filter(), hasItem(QueryBuilders.idsQuery().addIds(apiKeyId))); } + public void testSimpleQueryStringFieldPatternTranslation() { + String queryStringQuery = randomAlphaOfLength(8); + Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null; + // no field translates to all the allowed fields + { + List queryFields = new ArrayList<>(); + SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication); + assertThat( + queryFields.subList(0, 9), + containsInAnyOrder( + "creator.principal", + "creator.realm", + "name", + "runtime_key_type", + "creation_time", + "expiration_time", + "api_key_invalidated", + "invalidation_time", + "metadata_flattened" + ) + ); + assertThat(queryFields.get(9), is("doc_type")); + assertThat( + apiKeyQb.must().get(0), + equalTo( + QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("creator.principal") + .field("creator.realm") + .field("name") + .field("runtime_key_type") + .field("creation_time") + .field("expiration_time") + .field("api_key_invalidated") + .field("invalidation_time") + .field("metadata_flattened") + .lenient(true) + ) + ); + } + // * matches all fields + { + List queryFields = new ArrayList<>(); + float 
fieldBoost = Math.abs(randomFloat()); + SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("*", fieldBoost); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication); + assertThat( + queryFields.subList(0, 9), + containsInAnyOrder( + "creator.principal", + "creator.realm", + "name", + "runtime_key_type", + "creation_time", + "expiration_time", + "api_key_invalidated", + "invalidation_time", + "metadata_flattened" + ) + ); + assertThat(queryFields.get(9), is("doc_type")); + assertThat( + apiKeyQb.must().get(0), + equalTo( + QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("creator.principal", fieldBoost) + .field("creator.realm", fieldBoost) + .field("name", fieldBoost) + .field("runtime_key_type", fieldBoost) + .field("creation_time", fieldBoost) + .field("expiration_time", fieldBoost) + .field("api_key_invalidated", fieldBoost) + .field("invalidation_time", fieldBoost) + .field("metadata_flattened", fieldBoost) + .lenient(true) + ) + ); + } + // pattern that matches a subset of fields + { + List queryFields = new ArrayList<>(); + float fieldBoost = Math.abs(randomFloat()); + boolean lenient = randomBoolean(); + SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("i*", fieldBoost).lenient(lenient); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication); + assertThat(queryFields.subList(0, 2), containsInAnyOrder("api_key_invalidated", "invalidation_time")); + assertThat(queryFields.get(2), is("doc_type")); + assertThat( + apiKeyQb.must().get(0), + equalTo( + QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("api_key_invalidated", fieldBoost) + .field("invalidation_time", fieldBoost) + .lenient(lenient) + ) + ); + } + // multi pattern that matches a subset of fields + { + List queryFields = new ArrayList<>(); + float boost1 = randomFrom(2.0f, 4.0f, 8.0f); + float boost2 = 
randomFrom(2.0f, 4.0f, 8.0f); + float boost3 = randomFrom(2.0f, 4.0f, 8.0f); + SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("i*", boost1) + .field("u*", boost2) + .field("user*", boost3); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication); + assertThat(queryFields.subList(0, 3), containsInAnyOrder("creator.principal", "api_key_invalidated", "invalidation_time")); + assertThat(queryFields.get(4), is("doc_type")); + assertThat( + apiKeyQb.must().get(0), + equalTo( + QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("api_key_invalidated", boost1) + .field("invalidation_time", boost1) + .field("creator.principal", boost2 * boost3) + .lenient(false) + ) + ); + + // wildcards don't expand under metadata.* + queryFields = new ArrayList<>(); + q = QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("rea*", boost1) + .field("t*", boost1) + .field("ty*", boost2) + .field("me*", boost2) + .field("metadata.*", boost3) + .field("metadata.x*", boost3); + apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication); + assertThat( + queryFields.subList(0, 4), + containsInAnyOrder("creator.realm", "runtime_key_type", "metadata_flattened", "runtime_key_type") + ); + assertThat(queryFields.get(4), is("doc_type")); + assertThat( + apiKeyQb.must().get(0), + equalTo( + QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("creator.realm", boost1) + .field("runtime_key_type", boost1 * boost2) + .field("metadata_flattened", boost2) + .lenient(false) + ) + ); + } + // patterns that don't match anything + { + List queryFields = new ArrayList<>(); + float boost1 = randomFrom(2.0f, 4.0f, 8.0f); + float boost2 = randomFrom(2.0f, 4.0f, 8.0f); + float boost3 = randomFrom(2.0f, 4.0f, 8.0f); + SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("field_that_does_not*", boost1) + .field("what*", boost2) + 
.field("aiaiaiai*", boost3); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication); + assertThat(queryFields.get(0), is("doc_type")); + if (authentication != null) { + assertThat(queryFields.get(1), is("creator.principal")); + assertThat(queryFields.get(2), is("creator.realm")); + assertThat(queryFields.size(), is(3)); + } else { + assertThat(queryFields.size(), is(1)); + } + assertThat(apiKeyQb.must().get(0), equalTo(new MatchNoneQueryBuilder())); + } + // disallowed or unknown field is silently ignored + { + List queryFields = new ArrayList<>(); + float boost1 = randomFrom(2.0f, 4.0f, 8.0f); + float boost2 = randomFrom(2.0f, 4.0f, 8.0f); + SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field("field_that_does_not*", boost1) + .field("unknown_field", boost2); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication); + assertThat(queryFields.get(0), is("doc_type")); + if (authentication != null) { + assertThat(queryFields.get(1), is("creator.principal")); + assertThat(queryFields.get(2), is("creator.realm")); + assertThat(queryFields.size(), is(3)); + } else { + assertThat(queryFields.size(), is(1)); + } + assertThat(apiKeyQb.must().get(0), equalTo(new MatchNoneQueryBuilder())); + + // translated field + queryFields = new ArrayList<>(); + String translatedField = randomFrom( + "creator.principal", + "creator.realm", + "runtime_key_type", + "creation_time", + "expiration_time", + "api_key_invalidated", + "invalidation_time", + "metadata_flattened" + ); + SimpleQueryStringBuilder q2 = QueryBuilders.simpleQueryStringQuery(queryStringQuery) + .field(translatedField, boost1) + .field("field_that_does_not*", boost2); + apiKeyQb = ApiKeyBoolQueryBuilder.build(q2, queryFields::add, authentication); + assertThat(queryFields.get(0), is("doc_type")); + if (authentication != null) { + assertThat(queryFields.get(1), is("creator.principal")); + 
assertThat(queryFields.get(2), is("creator.realm")); + assertThat(queryFields.size(), is(3)); + } else { + assertThat(queryFields.size(), is(1)); + } + + assertThat(apiKeyQb.must().get(0), equalTo(new MatchNoneQueryBuilder())); + } + } + private void testAllowedIndexFieldName(Predicate predicate) { final String allowedField = randomFrom( "doc_type", "name", + "type", + TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD, "api_key_invalidated", "creation_time", "expiration_time", "metadata_flattened." + randomAlphaOfLengthBetween(1, 10), - "creator." + randomAlphaOfLengthBetween(1, 10) + "creator.principal", + "creator.realm" ); assertThat(predicate, trueWith(allowedField)); @@ -348,18 +772,34 @@ private void assertCommonFilterQueries(ApiKeyBoolQueryBuilder qb, Authentication ); } - private QueryBuilder randomSimpleQuery(String name) { - return switch (randomIntBetween(0, 7)) { - case 0 -> QueryBuilders.termQuery(name, randomAlphaOfLengthBetween(3, 8)); - case 1 -> QueryBuilders.termsQuery(name, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))); + private QueryBuilder randomSimpleQuery(String fieldName) { + return switch (randomIntBetween(0, 8)) { + case 0 -> QueryBuilders.termQuery(fieldName, randomAlphaOfLengthBetween(3, 8)); + case 1 -> QueryBuilders.termsQuery(fieldName, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))); case 2 -> QueryBuilders.idsQuery().addIds(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(22))); - case 3 -> QueryBuilders.prefixQuery(name, "prod-"); - case 4 -> QueryBuilders.wildcardQuery(name, "prod-*-east-*"); + case 3 -> QueryBuilders.prefixQuery(fieldName, "prod-"); + case 4 -> QueryBuilders.wildcardQuery(fieldName, "prod-*-east-*"); case 5 -> QueryBuilders.matchAllQuery(); - case 6 -> QueryBuilders.existsQuery(name); - default -> QueryBuilders.rangeQuery(name) + case 6 -> QueryBuilders.existsQuery(fieldName); + case 7 -> QueryBuilders.rangeQuery(fieldName) 
.from(Instant.now().minus(1, ChronoUnit.DAYS).toEpochMilli(), randomBoolean()) .to(Instant.now().toEpochMilli(), randomBoolean()); + case 8 -> QueryBuilders.simpleQueryStringQuery("+rest key*") + .field(fieldName) + .lenient(randomBoolean()) + .analyzeWildcard(randomBoolean()); + default -> throw new IllegalStateException("illegal switch case"); }; } + + private void assertQueryFields(List actualQueryFields, QueryBuilder queryBuilder, Authentication authentication) { + assertThat(actualQueryFields, hasItem("doc_type")); + if ((queryBuilder instanceof IdsQueryBuilder || queryBuilder instanceof MatchAllQueryBuilder) == false) { + assertThat(actualQueryFields, hasItem("name")); + } + if (authentication != null && authentication.isApiKey() == false) { + assertThat(actualQueryFields, hasItem("creator.principal")); + assertThat(actualQueryFields, hasItem("creator.realm")); + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilderTests.java new file mode 100644 index 0000000000000..460980d318786 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilderTests.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.support; + +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.DistanceFeatureQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MultiTermQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.SpanQueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.indices.TermsLookup; +import org.elasticsearch.script.Script; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.hamcrest.core.IsIterableContaining.hasItem; +import static org.hamcrest.core.StringContains.containsString; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +public class UserBoolQueryBuilderTests extends ESTestCase { + private static final String[] allowedIndexFieldNames = new String[] { "username", "roles", "full_name", "email", "enabled" }; + + public void testBuildFromSimpleQuery() { + final QueryBuilder query = randomSimpleQuery(); + final UserBoolQueryBuilder userQueryBuilder = UserBoolQueryBuilder.build(query); + assertCommonFilterQueries(userQueryBuilder); + final List mustQueries = userQueryBuilder.must(); + assertThat(mustQueries, hasSize(1)); + 
assertThat(mustQueries.get(0), equalTo(query)); + assertTrue(userQueryBuilder.should().isEmpty()); + assertTrue(userQueryBuilder.mustNot().isEmpty()); + } + + public void testBuildFromBoolQuery() { + final BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + if (randomBoolean()) { + boolQueryBuilder.must(QueryBuilders.prefixQuery(randomAllowedField(), "bar")); + } + if (randomBoolean()) { + boolQueryBuilder.should(QueryBuilders.wildcardQuery(randomAllowedField(), "*ar*")); + } + if (randomBoolean()) { + boolQueryBuilder.filter(QueryBuilders.termsQuery("roles", randomArray(3, 8, String[]::new, () -> "role-" + randomInt()))); + } + if (randomBoolean()) { + boolQueryBuilder.minimumShouldMatch(randomIntBetween(1, 2)); + } + final UserBoolQueryBuilder userBoolQueryBuilder = UserBoolQueryBuilder.build(boolQueryBuilder); + assertCommonFilterQueries(userBoolQueryBuilder); + + assertThat(userBoolQueryBuilder.must(), hasSize(1)); + assertThat(userBoolQueryBuilder.should(), empty()); + assertThat(userBoolQueryBuilder.mustNot(), empty()); + assertThat(userBoolQueryBuilder.filter(), hasItem(QueryBuilders.termQuery("type", "user"))); + assertThat(userBoolQueryBuilder.must().get(0).getClass(), is(BoolQueryBuilder.class)); + final BoolQueryBuilder translated = (BoolQueryBuilder) userBoolQueryBuilder.must().get(0); + assertThat(translated.must(), equalTo(boolQueryBuilder.must())); + assertThat(translated.should(), equalTo(boolQueryBuilder.should())); + assertThat(translated.mustNot(), equalTo(boolQueryBuilder.mustNot())); + assertThat(translated.minimumShouldMatch(), equalTo(boolQueryBuilder.minimumShouldMatch())); + assertThat(translated.filter(), equalTo(boolQueryBuilder.filter())); + } + + public void testFieldNameTranslation() { + String field = randomAllowedField(); + final WildcardQueryBuilder wildcardQueryBuilder = QueryBuilders.wildcardQuery(field, "*" + randomAlphaOfLength(3)); + final UserBoolQueryBuilder userBoolQueryBuilder = 
UserBoolQueryBuilder.build(wildcardQueryBuilder); + assertCommonFilterQueries(userBoolQueryBuilder); + assertThat(userBoolQueryBuilder.must().get(0), equalTo(QueryBuilders.wildcardQuery(field, wildcardQueryBuilder.value()))); + } + + public void testAllowListOfFieldNames() { + final String fieldName = randomValueOtherThanMany( + v -> Arrays.asList(allowedIndexFieldNames).contains(v), + () -> randomFrom(randomAlphaOfLengthBetween(3, 20), "type", "password") + ); + + // MatchAllQueryBuilder doesn't do any translation, so skip + final QueryBuilder q1 = randomValueOtherThanMany( + q -> q.getClass() == MatchAllQueryBuilder.class, + () -> randomSimpleQuery(fieldName) + ); + final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> UserBoolQueryBuilder.build(q1)); + + assertThat(exception.getMessage(), containsString("Field [" + fieldName + "] is not allowed")); + } + + public void testTermsLookupIsNotAllowed() { + final TermsQueryBuilder q1 = QueryBuilders.termsLookupQuery("roles", new TermsLookup("lookup", "1", "id")); + final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> UserBoolQueryBuilder.build(q1)); + assertThat(e1.getMessage(), containsString("Terms query with terms lookup is not supported for User query")); + } + + public void testDisallowedQueryTypes() { + final AbstractQueryBuilder> q1 = randomFrom( + QueryBuilders.idsQuery(), + QueryBuilders.rangeQuery(randomAlphaOfLength(5)), + QueryBuilders.matchQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.constantScoreQuery(mock(QueryBuilder.class)), + QueryBuilders.boostingQuery(mock(QueryBuilder.class), mock(QueryBuilder.class)), + QueryBuilders.queryStringQuery("q=a:42"), + QueryBuilders.simpleQueryStringQuery(randomAlphaOfLength(5)), + QueryBuilders.combinedFieldsQuery(randomAlphaOfLength(5)), + QueryBuilders.disMaxQuery(), + QueryBuilders.distanceFeatureQuery( + randomAlphaOfLength(5), + 
mock(DistanceFeatureQueryBuilder.Origin.class), + randomAlphaOfLength(5) + ), + QueryBuilders.fieldMaskingSpanQuery(mock(SpanQueryBuilder.class), randomAlphaOfLength(5)), + QueryBuilders.functionScoreQuery(mock(QueryBuilder.class)), + QueryBuilders.fuzzyQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.wrapperQuery(randomAlphaOfLength(5)), + QueryBuilders.matchBoolPrefixQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.matchPhraseQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.matchPhrasePrefixQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.moreLikeThisQuery(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(5))), + QueryBuilders.regexpQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.spanTermQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.spanOrQuery(mock(SpanQueryBuilder.class)), + QueryBuilders.spanContainingQuery(mock(SpanQueryBuilder.class), mock(SpanQueryBuilder.class)), + QueryBuilders.spanFirstQuery(mock(SpanQueryBuilder.class), randomIntBetween(1, 3)), + QueryBuilders.spanMultiTermQueryBuilder(mock(MultiTermQueryBuilder.class)), + QueryBuilders.spanNotQuery(mock(SpanQueryBuilder.class), mock(SpanQueryBuilder.class)), + QueryBuilders.scriptQuery(new Script(randomAlphaOfLength(5))), + QueryBuilders.scriptScoreQuery(mock(QueryBuilder.class), new Script(randomAlphaOfLength(5))), + QueryBuilders.geoWithinQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.geoBoundingBoxQuery(randomAlphaOfLength(5)), + QueryBuilders.geoDisjointQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.geoDistanceQuery(randomAlphaOfLength(5)), + QueryBuilders.geoIntersectionQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.geoShapeQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)) + ); + + final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> 
UserBoolQueryBuilder.build(q1)); + assertThat(e1.getMessage(), containsString("Query type [" + q1.getName() + "] is not supported for User query")); + } + + public void testWillSetAllowedFields() { + final UserBoolQueryBuilder userBoolQueryBuilder = UserBoolQueryBuilder.build(randomSimpleQuery()); + + final SearchExecutionContext context = mock(SearchExecutionContext.class); + doAnswer(invocationOnMock -> { + final Object[] args = invocationOnMock.getArguments(); + @SuppressWarnings("unchecked") + final Predicate predicate = (Predicate) args[0]; + assertTrue(predicate.getClass().getName().startsWith(UserBoolQueryBuilder.class.getName())); + testAllowedIndexFieldName(predicate); + return null; + }).when(context).setAllowedFields(any()); + try { + if (randomBoolean()) { + userBoolQueryBuilder.doToQuery(context); + } else { + userBoolQueryBuilder.doRewrite(context); + } + } catch (Exception e) { + // just ignore any exception from superclass since we only need verify the allowedFields are set + } finally { + verify(context).setAllowedFields(any()); + } + } + + private void testAllowedIndexFieldName(Predicate predicate) { + final String allowedField = randomAllowedField(); + assertTrue(predicate.test(allowedField)); + + final String disallowedField = randomBoolean() ? 
(randomAlphaOfLengthBetween(1, 3) + allowedField) : (allowedField.substring(1)); + assertFalse(predicate.test(disallowedField)); + } + + private void assertCommonFilterQueries(UserBoolQueryBuilder qb) { + final List tqb = qb.filter() + .stream() + .filter(q -> q.getClass() == TermQueryBuilder.class) + .map(q -> (TermQueryBuilder) q) + .toList(); + assertTrue(tqb.stream().anyMatch(q -> q.equals(QueryBuilders.termQuery("type", "user")))); + } + + private String randomAllowedField() { + return randomFrom(allowedIndexFieldNames); + } + + private QueryBuilder randomSimpleQuery() { + return randomSimpleQuery(randomAllowedField()); + } + + private QueryBuilder randomSimpleQuery(String fieldName) { + return randomFrom( + QueryBuilders.termQuery(fieldName, randomAlphaOfLengthBetween(3, 8)), + QueryBuilders.termsQuery(fieldName, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))), + QueryBuilders.prefixQuery(fieldName, randomAlphaOfLength(randomIntBetween(3, 10))), + QueryBuilders.wildcardQuery(fieldName, "*" + randomAlphaOfLength(randomIntBetween(3, 10))), + QueryBuilders.matchAllQuery(), + QueryBuilders.existsQuery(fieldName) + ); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 46b0fac78ad8e..d49c1be8a7e0a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -492,7 +492,7 @@ public void testContextRestoreResponseHandler() throws Exception { threadContext.wrapRestorable(storedContext), new TransportResponseHandler.Empty() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor 
executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -527,7 +527,7 @@ public void testContextRestoreResponseHandlerRestoreOriginalContext() throws Exc threadContext.newRestorableContext(true), new TransportResponseHandler.Empty() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -626,7 +626,7 @@ public void testSendWithCrossClusterAccessHeadersWithUnsupportedLicense() throws final AtomicReference actualException = new AtomicReference<>(); sender.sendRequest(connection, "action", mock(TransportRequest.class), null, new TransportResponseHandler<>() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -788,7 +788,7 @@ public void sendRequest( sender.sendRequest(connection, action, request, null, new TransportResponseHandler<>() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -974,7 +974,7 @@ public void sendRequest( final AtomicReference actualException = new AtomicReference<>(); sender.sendRequest(connection, "action", mock(TransportRequest.class), null, new TransportResponseHandler<>() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } @@ -1068,7 +1068,7 @@ public void sendRequest( final var actualException = new AtomicReference(); sender.sendRequest(connection, "action", mock(TransportRequest.class), null, new TransportResponseHandler<>() { @Override - public Executor executor(ThreadPool threadPool) { + public Executor executor() { return TransportResponseHandler.TRANSPORT_WORKER; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java index c87ddd116b138..8c422342c3640 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.mocksocket.MockSocket; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -77,7 +78,7 @@ public final class SecurityNetty4HeaderSizeLimitTests extends ESTestCase { @Before public void startThreadPool() { - threadPool = new ThreadPool(settings); + threadPool = new ThreadPool(settings, MeterRegistry.NOOP); TaskManager taskManager = new TaskManager(settings, threadPool, Collections.emptySet()); NetworkService networkService = new NetworkService(Collections.emptyList()); PageCacheRecycler recycler = new MockPageCacheRecycler(Settings.EMPTY); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index 4b180548bfba4..a9d8c1dfc8d9e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.http.HttpServerTransport; import 
org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.NullDispatcher; -import org.elasticsearch.http.netty4.Netty4HttpResponse; +import org.elasticsearch.http.netty4.Netty4FullHttpResponse; import org.elasticsearch.http.netty4.Netty4HttpServerTransport; import org.elasticsearch.http.netty4.internal.HttpHeadersAuthenticatorUtils; import org.elasticsearch.http.netty4.internal.HttpHeadersWithAuthenticationContext; @@ -398,7 +398,7 @@ public void testHttpHeaderAuthnBypassHeaderValidator() throws Exception { }); writeFuture.get(); ch.flushOutbound(); - Netty4HttpResponse response = ch.readOutbound(); + Netty4FullHttpResponse response = ch.readOutbound(); assertThat(response.status(), is(HttpResponseStatus.INTERNAL_SERVER_ERROR)); String responseContentString = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8); assertThat( @@ -501,7 +501,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th ch.flushInbound(); }).get(); ch.flushOutbound(); - Netty4HttpResponse response = ch.readOutbound(); + Netty4FullHttpResponse response = ch.readOutbound(); assertThat(response.status(), is(HttpResponseStatus.INTERNAL_SERVER_ERROR)); var responseContentString = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8); assertThat( diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 7a90907b9cf39..4c96cc77a8520 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -8,7 +8,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import 
org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; @@ -21,11 +20,12 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.upgrades.ParameterizedFullClusterRestartTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; -import org.junit.BeforeClass; +import org.junit.Before; import org.junit.ClassRule; import java.io.IOException; @@ -64,6 +64,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas .keystore("xpack.watcher.encryption_key", Resource.fromClasspath("system_key")) .keystore("xpack.security.transport.ssl.secure_key_passphrase", "testnode") .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) .build(); public FullClusterRestartIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { @@ -87,11 +88,10 @@ protected Settings restClientSettings() { .build(); } - @BeforeClass - public static void checkClusterVersion() { + @Before + public void checkClusterVersion() { @UpdateForV9 // always true - var originalClusterSupportsShutdown = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_15_0)) - .orElse(true); + var originalClusterSupportsShutdown = oldClusterHasFeature(RestTestLegacyFeatures.SHUTDOWN_SUPPORTED); assumeTrue("no shutdown in versions before 7.15", originalClusterSupportsShutdown); } diff --git a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle index 4de49dc0b0770..32cab39f665d3 100644 --- 
a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle +++ b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle @@ -16,7 +16,6 @@ apply plugin: 'elasticsearch.rest-resources' dependencies { testImplementation project(':x-pack:qa') - testImplementation project(':client:rest-high-level') } restResources { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java index 153945e933d77..75c36f063f805 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java @@ -23,7 +23,7 @@ public class DeleteShutdownNodeAction extends ActionType { public static final String NAME = "cluster:admin/shutdown/delete"; public DeleteShutdownNodeAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java index 75afc9e0c05c5..b82e6a08fb269 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java @@ -35,7 +35,7 @@ public class GetShutdownStatusAction extends ActionType { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java index 0e378eb196724..d05b60cd947f5 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java +++ 
b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java @@ -33,7 +33,7 @@ public class PutShutdownNodeAction extends ActionType { public static final String NAME = "cluster:admin/shutdown/create"; public PutShutdownNodeAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME); } public static class Request extends AcknowledgedRequest { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java index 8c85bdb11dfa2..234a77154a641 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -55,6 +56,7 @@ public Collection createComponents(PluginServices services) { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java index 4d275d87c6877..2d4aaada484ad 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java +++ 
b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java @@ -68,6 +68,7 @@ import static java.util.stream.Collectors.toMap; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; +import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentSLMMode; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -812,14 +813,9 @@ private UnassignedInfo makeUnassignedInfo(String nodeId) { private ShardRouting makeUnassignedShard(Index index, int shardId, String nodeId, boolean primary) { var unsignedInfo = makeUnassignedInfo(nodeId); - return TestShardRouting.newShardRouting( - new ShardId(index, shardId), - null, - null, - primary, - ShardRoutingState.UNASSIGNED, + return shardRoutingBuilder(new ShardId(index, shardId), null, primary, ShardRoutingState.UNASSIGNED).withUnassignedInfo( unsignedInfo - ); + ).build(); } private ShutdownShardMigrationStatus getUnassignedShutdownStatus(Index index, IndexMetadata imd, ShardRouting... 
shards) { diff --git a/x-pack/plugin/slm/qa/multi-node/build.gradle b/x-pack/plugin/slm/qa/multi-node/build.gradle index b02fe7cd44fbd..1f4b0c3b10c30 100644 --- a/x-pack/plugin/slm/qa/multi-node/build.gradle +++ b/x-pack/plugin/slm/qa/multi-node/build.gradle @@ -6,7 +6,6 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(xpackModule('slm')) - javaRestTestImplementation project(":client:rest-high-level") } File repoDir = file("$buildDir/testclusters/repo") diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java index 0233db5af081f..946d9c081658a 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -173,6 +174,7 @@ private static List xContentEntries() { @Override public List getRestHandlers( Settings unused, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java index 050562f0162c9..b1ec8f3a28f1b 100644 --- 
a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java @@ -58,7 +58,7 @@ public class TransportSLMGetExpiredSnapshotsAction extends TransportAction< TransportSLMGetExpiredSnapshotsAction.Request, TransportSLMGetExpiredSnapshotsAction.Response> { - public static final ActionType INSTANCE = ActionType.localOnly("cluster:admin/slm/execute/get_expired_snapshots"); + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/slm/execute/get_expired_snapshots"); private static final Logger logger = LogManager.getLogger(TransportSLMGetExpiredSnapshotsAction.class); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java index a6a208fbf3105..abb4dcbd9d3db 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.slm.action; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -34,7 +35,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - GetSLMStatusAction.Request request = new GetSLMStatusAction.Request(); + AcknowledgedRequest.Plain request = new AcknowledgedRequest.Plain(); request.timeout(restRequest.paramAsTime("timeout", request.timeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(GetSLMStatusAction.INSTANCE, request, new 
RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java index 56646800db871..fd7abbafe0425 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.slm.action; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -34,7 +35,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - GetSnapshotLifecycleStatsAction.Request req = new GetSnapshotLifecycleStatsAction.Request(); + AcknowledgedRequest.Plain req = new AcknowledgedRequest.Plain(); req.timeout(request.paramAsTime("timeout", req.timeout())); req.masterNodeTimeout(request.paramAsTime("master_timeout", req.masterNodeTimeout())); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSLMStatusAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSLMStatusAction.java index ddb78bd6c5053..747cc45749c4f 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSLMStatusAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSLMStatusAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.TransportMasterNodeAction; 
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -24,7 +25,7 @@ import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentSLMMode; -public class TransportGetSLMStatusAction extends TransportMasterNodeAction { +public class TransportGetSLMStatusAction extends TransportMasterNodeAction { @Inject public TransportGetSLMStatusAction( @@ -40,7 +41,7 @@ public TransportGetSLMStatusAction( clusterService, threadPool, actionFilters, - GetSLMStatusAction.Request::new, + AcknowledgedRequest.Plain::new, indexNameExpressionResolver, GetSLMStatusAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE @@ -50,7 +51,7 @@ public TransportGetSLMStatusAction( @Override protected void masterOperation( Task task, - GetSLMStatusAction.Request request, + AcknowledgedRequest.Plain request, ClusterState state, ActionListener listener ) { @@ -58,7 +59,7 @@ protected void masterOperation( } @Override - protected ClusterBlockException checkBlock(GetSLMStatusAction.Request request, ClusterState state) { + protected ClusterBlockException checkBlock(AcknowledgedRequest.Plain request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSnapshotLifecycleStatsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSnapshotLifecycleStatsAction.java index d601c6bc2afb2..2cd8ac3012568 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSnapshotLifecycleStatsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/TransportGetSnapshotLifecycleStatsAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import 
org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -25,7 +26,7 @@ import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleStatsAction; public class TransportGetSnapshotLifecycleStatsAction extends TransportMasterNodeAction< - GetSnapshotLifecycleStatsAction.Request, + AcknowledgedRequest.Plain, GetSnapshotLifecycleStatsAction.Response> { @Inject @@ -42,7 +43,7 @@ public TransportGetSnapshotLifecycleStatsAction( clusterService, threadPool, actionFilters, - GetSnapshotLifecycleStatsAction.Request::new, + AcknowledgedRequest.Plain::new, indexNameExpressionResolver, GetSnapshotLifecycleStatsAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE @@ -52,7 +53,7 @@ public TransportGetSnapshotLifecycleStatsAction( @Override protected void masterOperation( Task task, - GetSnapshotLifecycleStatsAction.Request request, + AcknowledgedRequest.Plain request, ClusterState state, ActionListener listener ) { @@ -65,7 +66,7 @@ protected void masterOperation( } @Override - protected ClusterBlockException checkBlock(GetSnapshotLifecycleStatsAction.Request request, ClusterState state) { + protected ClusterBlockException checkBlock(AcknowledgedRequest.Plain request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); } } diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java index 3541edfa20c93..9bbb08e89166e 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; 
import org.elasticsearch.cluster.service.MasterService; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.scheduler.SchedulerEngine; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -454,13 +455,13 @@ public void testStoppedPriority() { ) ) ); - final SetOnce task = new SetOnce<>(); + final SetOnce task = new SetOnce<>(); ClusterService fakeService = new ClusterService(Settings.EMPTY, clusterSettings, threadPool, null) { @Override public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { logger.info("--> got task: [source: {}]: {}", source, updateTask); - if (updateTask instanceof OperationModeUpdateTask) { - task.set(updateTask); + if (updateTask instanceof OperationModeUpdateTask operationModeUpdateTask) { + task.set(operationModeUpdateTask); } } }; @@ -476,7 +477,9 @@ public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask true ); service.clusterChanged(new ClusterChangedEvent("blah", state, ClusterState.EMPTY_STATE)); - assertThat(task.get(), equalTo(OperationModeUpdateTask.slmMode(OperationMode.STOPPED))); + assertEquals(task.get().priority(), Priority.IMMEDIATE); + assertNull(task.get().getILMOperationMode()); + assertEquals(task.get().getSLMOperationMode(), OperationMode.STOPPED); threadPool.shutdownNow(); } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java index adf5aec6a72ce..e79186d886925 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java +++ 
b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java @@ -154,6 +154,15 @@ public void testFailsOnChecksumMismatch() { final RepositoryAnalyzeAction.Request request = new RepositoryAnalyzeAction.Request("test-repo"); request.maxBlobSize(ByteSizeValue.ofBytes(10L)); request.abortWritePermitted(false); + // The analysis can perform writeAndOverwrite as a rare action. + // Since a read is performed towards the end of overwrite or write (rarely), + // it can return either the old (write) or the new (overwrite) content and both + // are considered to be correct. + // This test disrupts reads and relies on the disrupted content to be different from + // correct contents to trigger the expected failure. However, in rare cases, + // the disrupted old content could be identical to the new content or vice versa which + // leads to CI failures. Therefore, we disable rare actions to improve CI stability. 
+ request.rareActionProbability(0.0); final CountDown countDown = new CountDown(between(1, request.getBlobCount())); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java index cad66019a3bbb..5e7f5dfdc855d 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java @@ -96,7 +96,7 @@ public class RepositoryAnalyzeAction extends HandledTransportAction INSTANCE = ActionType.localOnly("cluster:admin/repository/analyze"); + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/repository/analyze"); static final String UNCONTENDED_REGISTER_NAME_PREFIX = "test-register-uncontended-"; static final String CONTENDED_REGISTER_NAME_PREFIX = "test-register-contended-"; diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java index 96a4d05d2fb4b..fd8970f327ce9 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -36,6 +37,7 @@ public class SnapshotRepositoryTestKit extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java index eb9c1432c4775..4ab24c67ed955 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java @@ -61,6 +61,6 @@ public final String getWriteableName() { @Override public final TransportVersion getMinimalSupportedVersion() { - return TransportVersions.GENERIC_NAMED_WRITABLE_ADDED; + return TransportVersions.V_8_11_X; } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java index c1caa3cad9096..5682ed9f9e981 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java @@ -202,11 +202,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws */ public static CartesianPoint parsePoint(XContentParser parser, final boolean ignoreZValue) throws IOException, ElasticsearchParseException { - return cartesianPointParser.parsePoint(parser, ignoreZValue, value -> { - CartesianPoint point = new CartesianPoint(); - 
point.resetFromString(value, ignoreZValue); - return point; - }, value -> null); + return cartesianPointParser.parsePoint(parser, ignoreZValue, value -> new CartesianPoint().resetFromString(value, ignoreZValue)); } public static CartesianPoint parsePoint(Object value, boolean ignoreZValue) throws ElasticsearchParseException { @@ -244,7 +240,7 @@ public static void assertZValue(final boolean ignoreZValue, double zValue) { } } - private static GenericPointParser cartesianPointParser = new GenericPointParser<>("point", "x", "y", false) { + private static final GenericPointParser cartesianPointParser = new GenericPointParser<>("point", "x", "y") { @Override public void assertZValue(boolean ignoreZValue, double zValue) { diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java index 0de11109e33e7..d940f366ef942 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java @@ -88,7 +88,7 @@ public InternalGeoLine(StreamInput in) throws IOException { this.includeSorts = in.readBoolean(); this.sortOrder = SortOrder.readFromStream(in); this.size = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { nonOverlapping = in.readBoolean(); simplified = in.readBoolean(); } else { @@ -105,7 +105,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeBoolean(includeSorts); sortOrder.writeTo(out); out.writeVInt(size); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(nonOverlapping); 
out.writeBoolean(simplified); } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/ParsedGeoHexGrid.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/ParsedGeoHexGrid.java deleted file mode 100644 index ae8c878391405..0000000000000 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/ParsedGeoHexGrid.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid; - -import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoGrid; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -public class ParsedGeoHexGrid extends ParsedGeoGrid { - - private static final ObjectParser PARSER = createParser( - ParsedGeoHexGrid::new, - ParsedGeoHexGridBucket::fromXContent, - ParsedGeoHexGridBucket::fromXContent - ); - - public static ParsedGeoGrid fromXContent(XContentParser parser, String name) throws IOException { - ParsedGeoGrid aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; - } - - @Override - public String getType() { - return GeoHexGridAggregationBuilder.NAME; - } -} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/ParsedGeoHexGridBucket.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/ParsedGeoHexGridBucket.java deleted file mode 100644 index 1383e46dcd9e5..0000000000000 --- 
a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/ParsedGeoHexGridBucket.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid; - -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.h3.H3; -import org.elasticsearch.h3.LatLng; -import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoGridBucket; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -public class ParsedGeoHexGridBucket extends ParsedGeoGridBucket { - - @Override - public GeoPoint getKey() { - LatLng latLng = H3.h3ToLatLng(hashAsString); - return new GeoPoint(latLng.getLatDeg(), latLng.getLonDeg()); - } - - @Override - public String getKeyAsString() { - return hashAsString; - } - - static ParsedGeoHexGridBucket fromXContent(XContentParser parser) throws IOException { - return parseXContent(parser, false, ParsedGeoHexGridBucket::new, (p, bucket) -> bucket.hashAsString = p.text()); - } -} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/ParsedCartesianBounds.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/ParsedCartesianBounds.java deleted file mode 100644 index 35537d5da95c4..0000000000000 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/ParsedCartesianBounds.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.spatial.search.aggregations.metrics; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.spatial.common.CartesianBoundingBox; -import org.elasticsearch.xpack.spatial.common.CartesianPoint; - -import java.io.IOException; - -import static org.elasticsearch.common.geo.GeoBoundingBox.BOTTOM_RIGHT_FIELD; -import static org.elasticsearch.common.geo.GeoBoundingBox.BOUNDS_FIELD; -import static org.elasticsearch.common.geo.GeoBoundingBox.TOP_LEFT_FIELD; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xpack.spatial.common.CartesianBoundingBox.X_FIELD; -import static org.elasticsearch.xpack.spatial.common.CartesianBoundingBox.Y_FIELD; - -public class ParsedCartesianBounds extends ParsedAggregation implements CartesianBounds { - - // A top of Double.NEGATIVE_INFINITY yields an empty xContent, so the bounding box is null - @Nullable - private CartesianBoundingBox boundingBox; - - @Override - public String getType() { - return CartesianBoundsAggregationBuilder.NAME; - } - - @Override - public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - if (boundingBox != null) { - builder.startObject(CartesianBoundingBox.BOUNDS_FIELD.getPreferredName()); - boundingBox.toXContentFragment(builder); - builder.endObject(); - } - return builder; - } - - @Override - @Nullable - public CartesianPoint topLeft() { - return boundingBox != null ? 
boundingBox.topLeft() : null; - } - - @Override - @Nullable - public CartesianPoint bottomRight() { - return boundingBox != null ? boundingBox.bottomRight() : null; - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedCartesianBounds.class.getSimpleName(), - true, - ParsedCartesianBounds::new - ); - - private static final ConstructingObjectParser, Void> BOUNDS_PARSER = - new ConstructingObjectParser<>( - ParsedCartesianBounds.class.getSimpleName() + "_BOUNDS", - true, - args -> new Tuple<>((CartesianPoint) args[0], (CartesianPoint) args[1]) - ); - - private static final ObjectParser POINT_PARSER = new ObjectParser<>( - ParsedCartesianBounds.class.getSimpleName() + "_POINT", - true, - CartesianPoint::new - ); - - static { - declareAggregationFields(PARSER); - PARSER.declareObject((agg, bbox) -> agg.boundingBox = new CartesianBoundingBox(bbox.v1(), bbox.v2()), BOUNDS_PARSER, BOUNDS_FIELD); - - BOUNDS_PARSER.declareObject(constructorArg(), POINT_PARSER, TOP_LEFT_FIELD); - BOUNDS_PARSER.declareObject(constructorArg(), POINT_PARSER, BOTTOM_RIGHT_FIELD); - - POINT_PARSER.declareDouble(CartesianPoint::resetY, Y_FIELD); - POINT_PARSER.declareDouble(CartesianPoint::resetX, X_FIELD); - } - - public static ParsedCartesianBounds fromXContent(XContentParser parser, final String name) { - ParsedCartesianBounds geoBounds = PARSER.apply(parser, null); - geoBounds.setName(name); - return geoBounds; - } - -} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/ParsedCartesianCentroid.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/ParsedCartesianCentroid.java deleted file mode 100644 index 60d4eaa96f847..0000000000000 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/ParsedCartesianCentroid.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.spatial.search.aggregations.metrics; - -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.metrics.InternalCentroid; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.spatial.common.CartesianPoint; -import org.elasticsearch.xpack.spatial.search.aggregations.metrics.InternalCartesianCentroid.Fields; - -import java.io.IOException; - -/** - * Serialization and merge logic for {@link CartesianCentroidAggregator}. - */ -public class ParsedCartesianCentroid extends ParsedAggregation implements CartesianCentroid { - private CartesianPoint centroid; - private long count; - - @Override - public CartesianPoint centroid() { - return centroid; - } - - @Override - public long count() { - return count; - } - - @Override - public String getType() { - return CartesianCentroidAggregationBuilder.NAME; - } - - @Override - public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - if (centroid != null) { - builder.startObject(InternalCentroid.Fields.CENTROID.getPreferredName()); - { - builder.field(Fields.CENTROID_X.getPreferredName(), centroid.getX()); - builder.field(Fields.CENTROID_Y.getPreferredName(), centroid.getY()); - } - builder.endObject(); - } - builder.field(InternalCentroid.Fields.COUNT.getPreferredName(), count); - return builder; - } - - private static final ObjectParser PARSER = new ObjectParser<>( - ParsedCartesianCentroid.class.getSimpleName(), - true, - ParsedCartesianCentroid::new - ); - - private static final ObjectParser CARTESIAN_POINT_PARSER = new ObjectParser<>( - 
ParsedCartesianCentroid.class.getSimpleName() + "_POINT", - true, - CartesianPoint::new - ); - - static { - declareAggregationFields(PARSER); - PARSER.declareObject((agg, centroid) -> agg.centroid = centroid, CARTESIAN_POINT_PARSER, InternalCentroid.Fields.CENTROID); - PARSER.declareLong((agg, count) -> agg.count = count, InternalCentroid.Fields.COUNT); - - CARTESIAN_POINT_PARSER.declareDouble(CartesianPoint::resetX, Fields.CENTROID_X); - CARTESIAN_POINT_PARSER.declareDouble(CartesianPoint::resetY, Fields.CENTROID_Y); - } - - public static ParsedCartesianCentroid fromXContent(XContentParser parser, final String name) { - ParsedCartesianCentroid centroid = PARSER.apply(parser, null); - centroid.setName(name); - return centroid; - } -} diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java index d9309cfb16a4c..b2b250c6d81bd 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java @@ -428,8 +428,10 @@ protected IngestScriptSupport ingestScriptSupport() { } @Override - protected boolean supportsColumnAtATimeReader(MapperService mapper, MappedFieldType ft) { - // Currently ESQL support for cartesian_point is limited to source values - return false; + protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) { + // TODO: Support testing both reading from source as well as reading from doc-values + MappedFieldType ft = mapper.fieldType(loaderFieldName); + PointFieldMapper.PointFieldType point = (PointFieldMapper.PointFieldType) ft; + return new BlockReaderSupport(point.isIndexed() == false && ft.hasDocValues(), false, mapper, loaderFieldName); } } diff --git 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLineTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLineTests.java index 432138f8339ff..f88b9ae03e05f 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLineTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLineTests.java @@ -6,19 +6,13 @@ */ package org.elasticsearch.xpack.spatial.search.aggregations; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.spatial.SpatialPlugin; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -137,20 +131,4 @@ protected void assertReduced(InternalGeoLine reduced, List inpu assertArrayEquals(finalCappedSortVals, reduced.sortVals(), 0d); assertArrayEquals(finalCappedPoints, reduced.line()); } - - @Override - protected void assertFromXContent(InternalGeoLine aggregation, ParsedAggregation parsedAggregation) throws IOException { - // There is no ParsedGeoLine yet so we cannot test it here - } - - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(GeoLineAggregationBuilder.NAME), (p, c) -> { - assumeTrue("There is no ParsedGeoLine yet", false); 
- return null; - }) - ); - } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTests.java index 421e014452024..de7e47ec37d90 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTests.java @@ -6,15 +6,11 @@ */ package org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.h3.H3; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridTestCase; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoGridBucket; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import java.util.List; @@ -27,18 +23,6 @@ protected SearchPlugin registerPlugin() { return new LocalStateSpatialPlugin(); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(GeoHexGridAggregationBuilder.NAME), - (p, c) -> ParsedGeoHexGrid.fromXContent(p, (String) c) - ) - ); - } - @Override protected InternalGeoHexGrid createInternalGeoGrid( String name, diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianBoundsTests.java 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianBoundsTests.java index eef718d302c5a..0dabcedf3fa2c 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianBoundsTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianBoundsTests.java @@ -7,15 +7,10 @@ package org.elasticsearch.xpack.spatial.search.aggregations.metrics; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import java.util.HashMap; @@ -34,18 +29,6 @@ protected SearchPlugin registerPlugin() { return new LocalStateSpatialPlugin(); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(CartesianBoundsAggregationBuilder.NAME), - (p, c) -> ParsedCartesianBounds.fromXContent(p, (String) c) - ) - ); - } - @Override protected InternalCartesianBounds createTestInstance(String name, Map metadata) { // we occasionally want to test top = Double.NEGATIVE_INFINITY since this triggers empty xContent object @@ -92,15 +75,6 @@ protected void assertSampled(InternalCartesianBounds sampled, InternalCartesianB assertValueClose(sampled.right, reduced.right); } - @Override - protected void assertFromXContent(InternalCartesianBounds aggregation, ParsedAggregation 
parsedAggregation) { - assertTrue(parsedAggregation instanceof ParsedCartesianBounds); - ParsedCartesianBounds parsed = (ParsedCartesianBounds) parsedAggregation; - - assertEquals(aggregation.topLeft(), parsed.topLeft()); - assertEquals(aggregation.bottomRight(), parsed.bottomRight()); - } - @Override protected InternalCartesianBounds mutateInstance(InternalCartesianBounds instance) { String name = instance.getName(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java index d033b3e51e31b..f81be81a3ac01 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java @@ -8,18 +8,13 @@ package org.elasticsearch.xpack.spatial.search.aggregations.metrics; import org.elasticsearch.common.geo.SpatialPoint; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.metrics.InternalCentroid; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import org.elasticsearch.xpack.spatial.common.CartesianPoint; @@ -40,18 +35,6 @@ protected SearchPlugin 
registerPlugin() { return new LocalStateSpatialPlugin(); } - @Override - protected List getNamedXContents() { - return CollectionUtils.appendToCopy( - super.getNamedXContents(), - new NamedXContentRegistry.Entry( - Aggregation.class, - new ParseField(CartesianCentroidAggregationBuilder.NAME), - (p, c) -> ParsedCartesianCentroid.fromXContent(p, (String) c) - ) - ); - } - @Override protected InternalCartesianCentroid createTestInstance(String name, Map metadata) { Point point = ShapeTestUtils.randomPoint(false); @@ -107,15 +90,6 @@ public void testReduceMaxCount() { assertThat(reducedCentroid.count(), equalTo(Long.MAX_VALUE)); } - @Override - protected void assertFromXContent(InternalCartesianCentroid aggregation, ParsedAggregation parsedAggregation) { - assertTrue(parsedAggregation instanceof ParsedCartesianCentroid); - ParsedCartesianCentroid parsed = (ParsedCartesianCentroid) parsedAggregation; - - assertEquals(aggregation.centroid(), parsed.centroid()); - assertEquals(aggregation.count(), parsed.count()); - } - @Override protected InternalCartesianCentroid mutateInstance(InternalCartesianCentroid instance) { double minValue = -1000000; diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java index 5b0e8ce5a2d11..568fcc8275732 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java @@ -14,6 +14,6 @@ public class SqlClearCursorAction extends ActionType { public static final String NAME = "indices:data/read/sql/close_cursor"; private SqlClearCursorAction() { - super(NAME, SqlClearCursorResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java 
b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java index d374356a81229..7640363721e91 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java @@ -14,6 +14,6 @@ public class SqlQueryAction extends ActionType { public static final String NAME = "indices:data/read/sql"; private SqlQueryAction() { - super(NAME, SqlQueryResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java index df6f202886ab7..5fefc30b33d25 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java @@ -17,6 +17,6 @@ public class SqlTranslateAction extends ActionType { public static final String NAME = "indices:data/read/sql/translate"; private SqlTranslateAction() { - super(NAME, SqlTranslateResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index 3c8f614145719..a9849c974ad96 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -137,7 +137,6 @@ public boolean queryClose(String cursor, Mode mode) throws SQLException { return response.response().isSucceeded(); } - @SuppressWarnings({ "removal" }) private ResponseWithWarnings post( String path, Request request, @@ -165,7 +164,6 @@ private 
ResponseWithWarnings Response get(String path, CheckedFunction responseParser) throws SQLException { Tuple>, byte[]> response = java.security.AccessController.doPrivileged( (PrivilegedAction>, byte[]>>>) () -> JreHttpUrlConnection.http( diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java index f667ae4b80d03..7ad54901e2d06 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.support.ActionFilter; @@ -271,7 +270,7 @@ protected TaskId cancelTaskWithXOpaqueId(String id, String action) { TaskId taskId = findTaskWithXOpaqueId(id, action); assertNotNull(taskId); logger.trace("Cancelling task " + taskId); - CancelTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); + ListTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); assertThat(response.getTasks(), hasSize(1)); assertThat(response.getTasks().get(0).action(), equalTo(action)); logger.trace("Task is cancelled " + taskId); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 79b3116bfa807..7a12b05f6b49f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -927,9 +927,7 @@ private static class HavingOverProject extends AnalyzerRule { protected LogicalPlan rule(Filter f) { if (f.child() instanceof Project p) { for (Expression n : p.projections()) { - if (n instanceof Alias) { - n = ((Alias) n).child(); - } + n = Alias.unwrap(n); // no literal or aggregates - it's a 'regular' projection if (n.foldable() == false && Functions.isAggregate(n) == false // folding might not work (it might wait for the optimizer) @@ -1014,12 +1012,7 @@ private static Set findMissingAggregate(Aggregate target, Expre Set missing = new LinkedHashSet<>(); for (Expression filterAgg : from.collect(Functions::isAggregate)) { - if (Expressions.anyMatch(target.aggregates(), a -> { - if (a instanceof Alias) { - a = ((Alias) a).child(); - } - return a.equals(filterAgg); - }) == false) { + if (Expressions.anyMatch(target.aggregates(), a -> Alias.unwrap(a).equals(filterAgg)) == false) { missing.add(Expressions.wrapAsNamed(filterAgg)); } } @@ -1066,12 +1059,7 @@ protected LogicalPlan rule(OrderBy ob) { List missing = new ArrayList<>(); for (Expression orderedAgg : aggs) { - if (Expressions.anyMatch(a.aggregates(), e -> { - if (e instanceof Alias) { - e = ((Alias) e).child(); - } - return e.equals(orderedAgg); - }) == false) { + if (Expressions.anyMatch(a.aggregates(), e -> Alias.unwrap(e).equals(orderedAgg)) == false) { missing.add(Expressions.wrapAsNamed(orderedAgg)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index c7826919c5999..4c6d06738e16f 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -918,7 +918,7 @@ private static void checkPivot(LogicalPlan p, Set localFailures, Attrib DataType colType = pv.column().dataType(); for (NamedExpression v : pv.values()) { // check all values are foldable - Expression ex = v instanceof Alias ? ((Alias) v).child() : v; + Expression ex = Alias.unwrap(v); if (ex instanceof Literal == false) { localFailures.add(fail(v, "Non-literal [{}] found inside PIVOT values", v.name())); } else if (ex.foldable() && ex.fold() == null) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 936f4aa23cd57..1d7a3cdd836ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -29,8 +29,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; @@ -214,7 +214,7 @@ public static SearchRequest prepareRequest(SearchSourceBuilder source, SqlConfig } protected static void logSearchResponse(SearchResponse response, Logger logger) { - List aggs = Collections.emptyList(); + List 
aggs = Collections.emptyList(); if (response.getAggregations() != null) { aggs = response.getAggregations().asList(); } @@ -382,7 +382,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { throw new SqlIllegalArgumentException("No group-by/aggs defined"); } }); @@ -404,9 +404,9 @@ protected void handleResponse(SearchResponse response, ActionListener list logSearchResponse(response, log); } - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); if (aggs != null) { - Aggregation agg = aggs.get(Aggs.ROOT_GROUP_NAME); + InternalAggregation agg = aggs.get(Aggs.ROOT_GROUP_NAME); if (agg instanceof Filters filters) { handleBuckets(filters.getBuckets(), response); } else { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java index 36a42aaad7161..8fa41017762a7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java @@ -156,14 +156,12 @@ static void handle( logSearchResponse(response, log); } - SearchHit[] hits = response.getHits().getHits(); - SearchHitRowSet rowSet = makeRowSet.get(); if (rowSet.hasRemaining() == false) { closePointInTime(client, response.pointInTimeId(), listener.delegateFailureAndWrap((l, r) -> l.onResponse(Page.last(rowSet)))); } else { - updateSearchAfter(hits, source); + updateSearchAfter(response.getHits().getHits(), source); SearchHitCursor nextCursor = new SearchHitCursor( source, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java index 
ba6a9854e4254..b6e3e8b759352 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java @@ -28,9 +28,8 @@ * Extracts rows from an array of {@link SearchHit}. */ class SearchHitRowSet extends ResultRowSet { - private final SearchHit[] hits; + private final SearchHits hits; private final Map> flatInnerHits = new HashMap<>(); - private final Set innerHits = new LinkedHashSet<>(); private final String innerHit; private final int size; @@ -42,13 +41,14 @@ class SearchHitRowSet extends ResultRowSet { SearchHitRowSet(List exts, BitSet mask, int sizeRequested, int limit, SearchResponse response) { super(exts, mask); - this.hits = response.getHits().getHits(); + this.hits = response.getHits().asUnpooled(); // Since the results might contain nested docs, the iteration is similar to that of Aggregation // namely it discovers the nested docs and then, for iteration, increments the deepest level first // and eventually carries that over to the top level String innerHit = null; + Set innerHits = new LinkedHashSet<>(); for (HitExtractor ex : exts) { if (ex.hitName() != null) { innerHits.add(ex.hitName()); @@ -58,7 +58,7 @@ class SearchHitRowSet extends ResultRowSet { } } - int sz = hits.length; + int sz = hits.getHits().length; int maxDepth = 0; if (innerHits.isEmpty() == false) { @@ -106,7 +106,7 @@ protected Object extractValue(HitExtractor e) { int extractorLevel = e.hitName() == null ? 
0 : 1; SearchHit hit = null; - SearchHit[] sh = hits; + SearchHit[] sh = hits.getHits(); for (int lvl = 0; lvl <= extractorLevel; lvl++) { // TODO: add support for multi-nested doc if (hit != null) { @@ -172,7 +172,7 @@ protected boolean doNext() { // increment last row indexPerLevel[indexPerLevel.length - 1]++; // then check size - SearchHit[] sh = hits; + SearchHit[] sh = hits.getHits(); for (int lvl = 0; lvl < indexPerLevel.length; lvl++) { if (indexPerLevel[lvl] == sh.length) { // reset the current branch @@ -181,7 +181,7 @@ protected boolean doNext() { indexPerLevel[lvl - 1]++; // restart the loop lvl = 0; - sh = hits; + sh = hits.getHits(); } else { SearchHit h = sh[indexPerLevel[lvl]]; // TODO: improve this for multi-nested responses diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index d3ab0f797ae2f..676d5eb87bbff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -1211,10 +1211,7 @@ protected LogicalPlan rule(Aggregate plan) { } private static boolean foldable(Expression e) { - if (e instanceof Alias) { - e = ((Alias) e).child(); - } - return e.foldable(); + return Alias.unwrap(e).foldable(); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetResultsAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetResultsAction.java index cf3c422901877..01790a8749430 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetResultsAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetResultsAction.java @@ -16,6 +16,6 @@ public class SqlAsyncGetResultsAction extends ActionType { public static final String NAME = 
SQL_ASYNC_GET_RESULT_ACTION_NAME; private SqlAsyncGetResultsAction() { - super(NAME, SqlQueryResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetStatusAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetStatusAction.java index 366d8c606f86b..22732b1b53f4a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetStatusAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetStatusAction.java @@ -16,6 +16,6 @@ public class SqlAsyncGetStatusAction extends ActionType { public static final String NAME = SQL_ASYNC_GET_STATUS_ACTION_NAME; private SqlAsyncGetStatusAction() { - super(NAME, QlStatusResponse::new); + super(NAME); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java index c6e0b5067ee08..52a62f4b21d76 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java @@ -109,6 +109,7 @@ Collection createComponents( @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java index 1a7afdce307ed..ccff0e20a4b62 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.action.ActionType; 
-import org.elasticsearch.common.io.stream.Writeable; public class SqlStatsAction extends ActionType { @@ -16,6 +15,6 @@ public class SqlStatsAction extends ActionType { public static final String NAME = "cluster:monitor/xpack/sql/stats/dist"; private SqlStatsAction() { - super(NAME, Writeable.Reader.localOnly()); + super(NAME); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java index ef4b3d66b5fc1..14b13a9ab0a32 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.execution.search.extractor; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; @@ -76,7 +76,7 @@ protected CompositeKeyExtractor mutateInstance(CompositeKeyExtractor instance) { } public void testExtractBucketCount() { - Bucket bucket = new TestBucket(emptyMap(), randomLong(), new Aggregations(emptyList())); + Bucket bucket = new TestBucket(emptyMap(), randomLong(), InternalAggregations.from(emptyList())); CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.COUNT, randomZone(), NULL); assertEquals(bucket.getDocCount(), extractor.extract(bucket)); } @@ -85,7 +85,7 @@ public void testExtractKey() { CompositeKeyExtractor extractor = new 
CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, UTC, NULL); Object value = new Object(); - Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList())); + Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), InternalAggregations.from(emptyList())); assertEquals(value, extractor.extract(bucket)); } @@ -93,7 +93,7 @@ public void testExtractDate() { CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomZone(), DATETIME); long millis = System.currentTimeMillis(); - Bucket bucket = new TestBucket(singletonMap(extractor.key(), millis), randomLong(), new Aggregations(emptyList())); + Bucket bucket = new TestBucket(singletonMap(extractor.key(), millis), randomLong(), InternalAggregations.from(emptyList())); assertEquals(DateUtils.asDateTimeWithMillis(millis, extractor.zoneId()), extractor.extract(bucket)); } @@ -101,7 +101,7 @@ public void testExtractIncorrectDateKey() { CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomZone(), DATETIME); Object value = new Object(); - Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList())); + Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), InternalAggregations.from(emptyList())); SqlIllegalArgumentException exception = expectThrows(SqlIllegalArgumentException.class, () -> extractor.extract(bucket)); assertEquals("Invalid date key returned: " + value, exception.getMessage()); } @@ -109,7 +109,7 @@ public void testExtractIncorrectDateKey() { public void testExtractUnsignedLong() { CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomZone(), UNSIGNED_LONG); Long value = randomLong(); - Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new 
Aggregations(emptyList())); + Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), InternalAggregations.from(emptyList())); assertEquals(BigInteger.valueOf(value).and(UNSIGNED_LONG_MAX), extractor.extract(bucket)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java index d06a239e61ce7..112be29d2dcd8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java @@ -82,7 +82,7 @@ public void testGet() { double value = randomDouble(); double expected = Math.log(value); DocumentField field = new DocumentField(fieldName, singletonList(value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); assertEquals(expected, extractor.process(hit)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index 5c3fc378d90c1..b951f96e8b933 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -94,7 +94,7 @@ public void testGetDottedValueWithDocValues() { } DocumentField field = new DocumentField(fieldName, documentFieldValues); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); Object result = documentFieldValues.isEmpty() ? 
null : documentFieldValues.get(0); assertEquals(result, extractor.extract(hit)); @@ -112,7 +112,7 @@ public void testGetDocValue() { documentFieldValues.add(randomValue()); } DocumentField field = new DocumentField(fieldName, documentFieldValues); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0); assertEquals(result, extractor.extract(hit)); @@ -127,7 +127,7 @@ public void testGetDate() { ZonedDateTime zdt = DateUtils.asDateTimeWithMillis(millis, zoneId).plusNanos(nanosOnly); List documentFieldValues = Collections.singletonList(StringUtils.toString(zdt)); DocumentField field = new DocumentField("my_date_nanos_field", documentFieldValues); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField("my_date_nanos_field", field); FieldHitExtractor extractor = new FieldHitExtractor("my_date_nanos_field", DATETIME, zoneId, LENIENT); assertEquals(zdt, extractor.extract(hit)); @@ -144,7 +144,7 @@ public void testMultiValuedDocValue() { String fieldName = randomAlphaOfLength(5); FieldHitExtractor fe = getFieldHitExtractor(fieldName); DocumentField field = new DocumentField(fieldName, asList("a", "b")); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); @@ -154,7 +154,7 @@ public void testExtractSourcePath() { FieldHitExtractor fe = getFieldHitExtractor("a.b.c"); Object value = randomValue(); DocumentField field = new DocumentField("a.b.c", singletonList(value)); - SearchHit hit = new SearchHit(1, null, null); + SearchHit hit = SearchHit.unpooled(1, null, null); hit.setDocumentField("a.b.c", field); 
assertThat(fe.extract(hit), is(value)); } @@ -163,7 +163,7 @@ public void testMultiValuedSource() { FieldHitExtractor fe = getFieldHitExtractor("a"); Object value = randomValue(); DocumentField field = new DocumentField("a", asList(value, value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField("a", field); Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [a]) are not supported")); @@ -174,7 +174,7 @@ public void testMultiValuedSourceAllowed() { Object valueA = randomValue(); Object valueB = randomValue(); DocumentField field = new DocumentField("a", asList(valueA, valueB)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField("a", field); assertEquals(valueA, fe.extract(hit)); } @@ -187,7 +187,7 @@ public void testGeoShapeExtraction() { map.put("coordinates", asList(1d, 2d)); map.put("type", "Point"); DocumentField field = new DocumentField(fieldName, singletonList(map)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); assertEquals(new GeoShape(1, 2), fe.extract(hit)); @@ -204,14 +204,14 @@ public void testMultipleGeoShapeExtraction() { map2.put("coordinates", asList(3d, 4d)); map2.put("type", "Point"); DocumentField field = new DocumentField(fieldName, asList(map1, map2)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, randomBoolean() ? 
GEO_SHAPE : SHAPE, UTC, LENIENT); - SearchHit searchHit = new SearchHit(1, "1"); + SearchHit searchHit = SearchHit.unpooled(1, "1"); searchHit.setDocumentField(fieldName, new DocumentField(fieldName, singletonList(map2))); assertEquals(new GeoShape(3, 4), lenientFe.extract(searchHit)); } @@ -223,7 +223,7 @@ public void testUnsignedLongExtraction() { String fieldName = randomAlphaOfLength(10); DocumentField field = new DocumentField(fieldName, singletonList(value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); FieldHitExtractor fe = new FieldHitExtractor(fieldName, UNSIGNED_LONG, randomZone(), randomBoolean() ? NONE : LENIENT); @@ -237,7 +237,7 @@ public void testVersionExtraction() { String fieldName = randomAlphaOfLength(10); DocumentField field = new DocumentField(fieldName, singletonList(value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); FieldHitExtractor fe = new FieldHitExtractor(fieldName, VERSION, randomZone(), randomBoolean() ? 
NONE : LENIENT); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractorTests.java index 32de6cd303969..bed6593f9e320 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractorTests.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.sql.execution.search.extractor; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; @@ -75,7 +75,7 @@ protected MetricAggExtractor mutateInstance(MetricAggExtractor instance) { } public void testNoAggs() { - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(emptyList())); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(emptyList())); MetricAggExtractor extractor = randomMetricAggExtractor(); SqlIllegalArgumentException exception = expectThrows(SqlIllegalArgumentException.class, () -> extractor.extract(bucket)); assertEquals("Cannot find an aggregation named " + extractor.name(), exception.getMessage()); @@ -85,8 +85,8 @@ public void testSingleValueProperty() { MetricAggExtractor extractor = new MetricAggExtractor("field", "property", "innerKey", null); double value = randomDouble(); - Aggregation agg = new TestSingleValueAggregation(extractor.name(), 
singletonList(extractor.property()), value); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + InternalAggregation agg = new TestSingleValueAggregation(extractor.name(), singletonList(extractor.property()), value); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(value, extractor.extract(bucket)); } @@ -95,20 +95,20 @@ public void testSingleValuePropertyDate() { MetricAggExtractor extractor = new MetricAggExtractor("my_date_field", "property", "innerKey", zoneId, DATETIME); double value = randomDouble(); - Aggregation agg = new TestSingleValueAggregation(extractor.name(), singletonList(extractor.property()), value); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + InternalAggregation agg = new TestSingleValueAggregation(extractor.name(), singletonList(extractor.property()), value); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(DateUtils.asDateTimeWithMillis((long) value, zoneId), extractor.extract(bucket)); } public void testSingleValueInnerKey() { MetricAggExtractor extractor = new MetricAggExtractor("field", "property", "innerKey", null); double innerValue = randomDouble(); - Aggregation agg = new TestSingleValueAggregation( + InternalAggregation agg = new TestSingleValueAggregation( extractor.name(), singletonList(extractor.property()), singletonMap(extractor.innerKey(), innerValue) ); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(innerValue, extractor.extract(bucket)); } @@ -117,12 +117,12 @@ public void testSingleValueInnerKeyDate() { MetricAggExtractor extractor = new MetricAggExtractor("field", "property", "innerKey", zoneId, DATE); double innerValue = randomDouble(); - Aggregation agg = new 
TestSingleValueAggregation( + InternalAggregation agg = new TestSingleValueAggregation( extractor.name(), singletonList(extractor.property()), singletonMap(extractor.innerKey(), innerValue) ); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(DateUtils.asDateTimeWithMillis((long) innerValue, zoneId), extractor.extract(bucket)); } @@ -130,8 +130,8 @@ public void testMultiValueProperty() { MetricAggExtractor extractor = new MetricAggExtractor("field", "property", "innerKey", null); double value = randomDouble(); - Aggregation agg = new TestMultiValueAggregation(extractor.name(), singletonMap(extractor.property(), value)); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + InternalAggregation agg = new TestMultiValueAggregation(extractor.name(), singletonMap(extractor.property(), value)); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(value, extractor.extract(bucket)); } @@ -140,8 +140,8 @@ public void testMultiValuePropertyDate() { MetricAggExtractor extractor = new MetricAggExtractor("field", "property", "innerKey", zoneId, DATETIME); double value = randomDouble(); - Aggregation agg = new TestMultiValueAggregation(extractor.name(), singletonMap(extractor.property(), value)); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + InternalAggregation agg = new TestMultiValueAggregation(extractor.name(), singletonMap(extractor.property(), value)); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(DateUtils.asDateTimeWithMillis((long) value, zoneId), extractor.extract(bucket)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java index fdce6cbcf0c2f..5d007218aeeb1 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java @@ -14,7 +14,7 @@ public void testGet() { int times = between(1, 1000); for (int i = 0; i < times; i++) { float score = randomFloat(); - SearchHit hit = new SearchHit(1); + SearchHit hit = SearchHit.unpooled(1); hit.score(score); assertEquals(score, ScoreExtractor.INSTANCE.extract(hit)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java index 2233ce94c0985..cb832cbd4b2d4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.sql.execution.search.extractor; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; import org.elasticsearch.xcontent.XContentBuilder; @@ -17,9 +17,9 @@ class TestBucket implements Bucket { private final Map key; private final long count; - private final Aggregations aggs; + private final InternalAggregations aggs; - TestBucket(Map key, long count, Aggregations aggs) { + TestBucket(Map key, long count, InternalAggregations aggs) { this.key = key; this.count = count; this.aggs = aggs; @@ -46,7 +46,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public 
InternalAggregations getAggregations() { return aggs; } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java index b7f123f82cf98..98e42e31143c3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.test.ESTestCase; @@ -63,7 +63,7 @@ protected TopHitsAggExtractor mutateInstance(TopHitsAggExtractor instance) { } public void testNoAggs() { - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(emptyList())); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(emptyList())); TopHitsAggExtractor extractor = randomTopHitsAggExtractor(); SqlIllegalArgumentException exception = expectThrows(SqlIllegalArgumentException.class, () -> extractor.extract(bucket)); assertEquals("Cannot find an aggregation named " + extractor.name(), exception.getMessage()); @@ -72,8 +72,8 @@ public void testNoAggs() { public void testZeroNullValue() { TopHitsAggExtractor extractor = randomTopHitsAggExtractor(); - Aggregation agg = new InternalTopHits(extractor.name(), 0, 0, null, 
SearchHits.EMPTY_WITH_TOTAL_HITS, null); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + InternalAggregation agg = new InternalTopHits(extractor.name(), 0, 0, null, SearchHits.EMPTY_WITH_TOTAL_HITS, null); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertNull(extractor.extract(bucket)); } @@ -81,8 +81,8 @@ public void testExtractValue() { TopHitsAggExtractor extractor = new TopHitsAggExtractor("topHitsAgg", DataTypes.KEYWORD, UTC); String value = "Str_Value"; - Aggregation agg = new InternalTopHits(extractor.name(), 0, 1, null, searchHitsOf(value), null); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + InternalAggregation agg = new InternalTopHits(extractor.name(), 0, 1, null, searchHitsOf(value), null); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(value, extractor.extract(bucket)); } @@ -91,7 +91,7 @@ public void testExtractDateValue() { TopHitsAggExtractor extractor = new TopHitsAggExtractor("topHitsAgg", DataTypes.DATETIME, zoneId); long value = 123456789L; - Aggregation agg = new InternalTopHits( + InternalAggregation agg = new InternalTopHits( extractor.name(), 0, 1, @@ -99,7 +99,7 @@ public void testExtractDateValue() { searchHitsOf(StringUtils.toString(DateUtils.asDateTimeWithMillis(value, zoneId))), null ); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(DateUtils.asDateTimeWithMillis(value, zoneId), extractor.extract(bucket)); } @@ -108,14 +108,14 @@ public void testExtractUnsignedLong() { Object value = bi.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) <= 0 ? 
bi.longValue() : bi; TopHitsAggExtractor extractor = new TopHitsAggExtractor(randomAlphaOfLength(10), DataTypes.UNSIGNED_LONG, randomZone()); - Aggregation agg = new InternalTopHits(extractor.name(), 0, 1, null, searchHitsOf(value), null); - Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg))); + InternalAggregation agg = new InternalTopHits(extractor.name(), 0, 1, null, searchHitsOf(value), null); + Bucket bucket = new TestBucket(emptyMap(), 0, InternalAggregations.from(singletonList(agg))); assertEquals(bi, extractor.extract(bucket)); } private SearchHits searchHitsOf(Object value) { TotalHits totalHits = new TotalHits(10, TotalHits.Relation.EQUAL_TO); - SearchHit searchHit = new SearchHit(1, "docId"); + SearchHit searchHit = SearchHit.unpooled(1, "docId"); searchHit.addDocumentFields( Collections.singletonMap("topHitsAgg", new DocumentField("field", Collections.singletonList(value))), Collections.singletonMap( @@ -123,6 +123,6 @@ private SearchHits searchHitsOf(Object value) { new DocumentField("_ignored", Collections.singletonList(randomValueOtherThan(value, () -> randomAlphaOfLength(5)))) ) ); - return new SearchHits(new SearchHit[] { searchHit }, totalHits, 0.0f); + return SearchHits.unpooled(new SearchHit[] { searchHit }, totalHits, 0.0f); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java index 4485c883ca30f..6513d72eaf1f8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java @@ -49,6 +49,7 @@ public void testSqlDisabledIsNoOp() { assertThat( plugin.getRestHandlers( Settings.EMPTY, + mock(NamedWriteableRegistry.class), mock(RestController.class), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), 
IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml new file mode 100644 index 0000000000000..0f8dbbb97f57f --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml @@ -0,0 +1,263 @@ +--- +setup: + - skip: + version: " - 8.12.99" + reason: "feature added in 8.13" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 5 + mappings: + properties: + data: + type: long + data_d: + type: double + count: + type: long + count_d: + type: double + time: + type: long + color: + type: keyword + always_null: + type: keyword + non_null_out_of_match: + type: keyword + - do: + bulk: + index: "test" + refresh: true + body: + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275187, "color": "red" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275188, "color": "blue" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275189, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275190, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275191, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275192, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275193, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275194, "color": "red", "non_null_out_of_match": "a" } + 
- { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275195, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275196, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275197, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275198, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275199, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275200, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275201, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275202, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275203, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275204, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275205, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275206, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275207, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, 
"data_d": 2, "count_d": 42, "time": 1674835275208, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275209, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275210, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275211, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275212, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275213, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275214, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275215, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275216, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275217, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275218, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275219, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275220, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275221, 
"color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275222, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275223, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275224, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275225, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275226, "color": "red", "non_null_out_of_match": "a" } + +--- +row wise and keep null: + - do: + esql.query: + drop_null_columns: false + body: + query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' + columnar: false + + - length: {columns: 8} + - match: {columns.0.name: "always_null"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "count"} + - match: {columns.2.type: "long"} + - match: {columns.3.name: "count_d"} + - match: {columns.3.type: "double"} + - match: {columns.4.name: "data"} + - match: {columns.4.type: "long"} + - match: {columns.5.name: "data_d"} + - match: {columns.5.type: "double"} + - match: {columns.6.name: "non_null_out_of_match"} + - match: {columns.6.type: "keyword"} + - match: {columns.7.name: "time"} + - match: {columns.7.type: "long"} + - length: {values: 2} + - length: {values.0: 8} + - is_false: values.0.0 + - match: {values.0.1: red} + +--- +row wise and drop null: + - do: + esql.query: + drop_null_columns: true + body: + query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' + columnar: false + + - length: {all_columns: 8} + - match: {all_columns.0.name: "always_null"} + - 
match: {all_columns.0.type: "keyword"} + - match: {all_columns.1.name: "color"} + - match: {all_columns.1.type: "keyword"} + - match: {all_columns.2.name: "count"} + - match: {all_columns.2.type: "long"} + - match: {all_columns.3.name: "count_d"} + - match: {all_columns.3.type: "double"} + - match: {all_columns.4.name: "data"} + - match: {all_columns.4.type: "long"} + - match: {all_columns.5.name: "data_d"} + - match: {all_columns.5.type: "double"} + - match: {all_columns.6.name: "non_null_out_of_match"} + - match: {all_columns.6.type: "keyword"} + - match: {all_columns.7.name: "time"} + - match: {all_columns.7.type: "long"} + - length: {columns: 6} + - match: {columns.0.name: "color"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "count"} + - match: {columns.1.type: "long"} + - match: {columns.2.name: "count_d"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "data"} + - match: {columns.3.type: "long"} + - match: {columns.4.name: "data_d"} + - match: {columns.4.type: "double"} + - match: {columns.5.name: "time"} + - match: {columns.5.type: "long"} + - length: {values: 2} + - length: {values.0: 6} + - match: {values.0.0: red} + +--- +columnar and keep null: + - do: + esql.query: + drop_null_columns: false + body: + query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' + columnar: true + + - length: {columns: 8} + - match: {columns.0.name: "always_null"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "count"} + - match: {columns.2.type: "long"} + - match: {columns.3.name: "count_d"} + - match: {columns.3.type: "double"} + - match: {columns.4.name: "data"} + - match: {columns.4.type: "long"} + - match: {columns.5.name: "data_d"} + - match: {columns.5.type: "double"} + - match: {columns.6.name: "non_null_out_of_match"} + - match: {columns.6.type: "keyword"} + - match: {columns.7.name: "time"} + - match: 
{columns.7.type: "long"} + - length: {values: 8} + - length: {values.0: 2} + - is_false: values.0.0 + - match: {values.1.0: red} + +--- +columnar and drop null: + - do: + esql.query: + drop_null_columns: true + body: + query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' + columnar: true + + - length: {all_columns: 8} + - match: {all_columns.0.name: "always_null"} + - match: {all_columns.0.type: "keyword"} + - match: {all_columns.1.name: "color"} + - match: {all_columns.1.type: "keyword"} + - match: {all_columns.2.name: "count"} + - match: {all_columns.2.type: "long"} + - match: {all_columns.3.name: "count_d"} + - match: {all_columns.3.type: "double"} + - match: {all_columns.4.name: "data"} + - match: {all_columns.4.type: "long"} + - match: {all_columns.5.name: "data_d"} + - match: {all_columns.5.type: "double"} + - match: {all_columns.6.name: "non_null_out_of_match"} + - match: {all_columns.6.type: "keyword"} + - match: {all_columns.7.name: "time"} + - match: {all_columns.7.type: "long"} + - length: {columns: 6} + - match: {columns.0.name: "color"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "count"} + - match: {columns.1.type: "long"} + - match: {columns.2.name: "count_d"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "data"} + - match: {columns.3.type: "long"} + - match: {columns.4.name: "data_d"} + - match: {columns.4.type: "double"} + - match: {columns.5.name: "time"} + - match: {columns.5.type: "long"} + - length: {values: 6} + - length: {values.0: 2} + - match: {values.0.0: red} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml new file mode 100644 index 0000000000000..b40564cdac1de --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml @@ -0,0 +1,314 @@ +--- +setup: + - skip: + version: 
"all" + reason: "waiting for final decisions on supporting generic expressions on the right https://github.com/elastic/elasticsearch/issues/103599" + + features: allowed_warnings_regex + - do: + indices.create: + index: test + body: + mappings: + properties: + id: + type: long + keyword: + type: keyword + keywordUpper: + type: keyword + text: + type: text + textCamel: + type: text + wildcard: + type: keyword + wildcardText: + type: text + + + - do: + bulk: + index: test + refresh: true + body: + - { "index": { } } + - { "id": 0, "keyword": "Foo", "keywordUpper": "FOO", "text": "foo", "textCamel": "FoO", "wildcard": "Foo*", "wildcardText": "FOo*" } + - { "index": { } } + - { "id": 1, "keyword": "Foo", "keywordUpper": "BAR", "text": "baz", "textCamel": "BaR", "wildcard": "Bar?", "wildcardText": "bar?" } + - { "index": { } } + - { "id": 2, "keyword": "abc", "keywordUpper": "XYZ", "text": "def", "textCamel": "GhI", "wildcard": "jkl?", "wildcardText": "MNO?" } + +--- +"insensitive equals field vs field keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where keyword =~ keywordUpper | keep id, keyword, keywordUpper' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "keyword" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "keywordUpper" } + - match: { columns.2.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 0, "Foo", "FOO"] } + +--- +"insensitive equals field vs field text": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where text =~ textCamel | keep id, text, textCamel' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "text" } + - match: { columns.1.type: "text" } + - match: { columns.2.name: "textCamel" } + - match: { 
columns.2.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 0, "foo", "FoO"] } + + +--- +"insensitive equals keyword field vs text field": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where keyword =~ text | keep id, keyword, text' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "keyword" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "text" } + - match: { columns.2.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 0, "Foo", "foo"] } + + +--- +"insensitive equals keyword field vs text field 2": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where keywordUpper =~ textCamel | keep id, keywordUpper, textCamel | sort id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "keywordUpper" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "textCamel" } + - match: { columns.2.type: "text" } + + - length: { values: 2 } + - match: { values.0: [ 0, "FOO", "FoO"] } + - match: { values.1: [ 1, "BAR", "BaR"] } + + +--- +"wildcards": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where keywordUpper =~ "fo*" | keep id, keywordUpper' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "keywordUpper" } + - match: { columns.1.type: "keyword" } + + - length: { values: 0 } + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where wildcard =~ "foo*" | keep id, wildcard' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcard" } + - match: { 
columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 0, "Foo*"] } + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where wildcard =~ "fOo*" | keep id, wildcard' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcard" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 0, "Foo*"] } + + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where keywordUpper =~ "fo?" | keep id, keywordUpper' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "keywordUpper" } + - match: { columns.1.type: "keyword" } + + - length: { values: 0 } + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where wildcard =~ "bar?" | keep id, wildcard' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcard" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 1, "Bar?"] } + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where wildcard =~ "bAr?" 
| keep id, wildcard' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcard" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 1, "Bar?"] } + + +--- +"wildcards on text": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where text =~ "Fo*" | keep id, text | sort id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "text" } + - match: { columns.1.type: "text" } + + - length: { values: 0 } + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where wildcardText =~ "fOo*" | keep id, wildcardText' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcardText" } + - match: { columns.1.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 0, "FOo*"] } + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where wildcardText =~ "bAr?" 
| keep id, wildcardText' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcardText" } + - match: { columns.1.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 1, "bar?"] } + + +--- +"wildcards on text no match": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where text =~ "fo\\*" | keep id, text' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "text" } + - match: { columns.1.type: "text" } + + - length: { values: 0 } + + +--- +"wildcards on keyword vs text": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where wildcard =~ wildcardText | keep id, wildcard, wildcardText | sort id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcard" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "wildcardText" } + - match: { columns.2.type: "text" } + + - length: { values: 2 } + - match: { values.0: [ 0, "Foo*", "FOo*"] } + - match: { values.1: [ 1, "Bar?", "bar?"] } + + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | where NOT wildcard =~ wildcardText | keep id, wildcard, wildcardText | sort id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "wildcard" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "wildcardText" } + - match: { columns.2.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 2, "jkl?", "MNO?"] } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml new file mode 
100644 index 0000000000000..9a050db5132b3 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml @@ -0,0 +1,157 @@ +--- +setup: + - skip: + version: " - 8.12.99" + reason: "fixed in 8.13" + features: warnings + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 5 + mappings: + properties: + always_null: + type: long + sometimes_null: + type: long + never_null: + type: long + - do: + bulk: + index: test + refresh: true + body: + - { "index": {} } + - { "sometimes_null": 1, "never_null": 1 } + - { "index": {} } + - { "sometimes_null": 1, "never_null": 1 } + - { "index": {} } + - { "never_null": 2 } + - { "index": {} } + - { "never_null": 2 } + +--- +group on null: + - do: + esql.query: + body: + query: 'FROM test | STATS med=median(never_null) BY always_null | LIMIT 1' + columnar: true + - match: {columns.0.name: "med"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "always_null"} + - match: {columns.1.type: "long"} + - length: {values: 2} + - match: {values.0: [1.5]} + - match: {values.1: [null]} + +--- +group on null, long: + - do: + esql.query: + body: + query: 'FROM test | STATS med=median(sometimes_null) BY always_null, never_null | SORT always_null, never_null | LIMIT 10' + columnar: true + - match: {columns.0.name: "med"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "always_null"} + - match: {columns.1.type: "long"} + - match: {columns.2.name: "never_null"} + - match: {columns.2.type: "long"} + - length: {values: 3} + - match: {values.0: [1.0, null]} + - match: {values.1: [null, null]} + - match: {values.2: [1, 2]} + +--- +agg on null: + - do: + esql.query: + body: + query: 'FROM test | STATS med=median(always_null) | LIMIT 1' + columnar: true + - match: {columns.0.name: "med"} + - match: {columns.0.type: "double"} + - length: {values: 1} + - match: {values.0: [null]} + +--- +agg on missing: + - do: + catch: bad_request + esql.query: + body: + 
query: 'FROM test | STATS med=median(missing) | LIMIT 1' + columnar: true + +--- +group on missing: + - do: + catch: bad_request + esql.query: + body: + query: 'FROM test | STATS med=median(never_null) BY missing | LIMIT 1' + columnar: true + +--- +agg on half missing: + - do: + indices.create: + index: test2 + body: + settings: + number_of_shards: 5 + mappings: + properties: + always_null: + type: long + sometimes_null: + type: long + never_null: + type: long + missing: + type: long + + - do: + esql.query: + body: + query: 'FROM test* | STATS med=median(missing) | LIMIT 1' + columnar: true + - match: {columns.0.name: "med"} + - match: {columns.0.type: "double"} + - length: {values: 1} + - match: {values.0: [null]} + +--- +group on half missing: + - do: + indices.create: + index: test2 + body: + settings: + number_of_shards: 5 + mappings: + properties: + always_null: + type: long + sometimes_null: + type: long + never_null: + type: long + missing: + type: long + + - do: + esql.query: + body: + query: 'FROM test,test2 | STATS med=median(never_null) BY missing | LIMIT 1' + columnar: true + - match: {columns.0.name: "med"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "missing"} + - match: {columns.1.type: "long"} + - length: {values: 2} + - match: {values.0: [1.5]} + - match: {values.1: [null]} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index 06fc2c8a3fa99..8b28776e42fcd 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -142,7 +142,7 @@ unsupported: - match: { columns.9.name: geo_point_alias } - match: { columns.9.type: geo_point } - match: { columns.10.name: geo_shape } - - match: { columns.10.type: unsupported } + - match: { columns.10.type: 
geo_shape } - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - match: { columns.12.name: integer_range } @@ -170,7 +170,7 @@ unsupported: - match: { columns.23.name: search_as_you_type._index_prefix } - match: { columns.23.type: unsupported } - match: { columns.24.name: shape } - - match: { columns.24.type: unsupported } + - match: { columns.24.type: cartesian_shape } - match: { columns.25.name: some_doc.bar } - match: { columns.25.type: long } - match: { columns.26.name: some_doc.foo } @@ -191,7 +191,7 @@ unsupported: - match: { values.0.7: null } - match: { values.0.8: "POINT (10.0 12.0)" } - match: { values.0.9: "POINT (10.0 12.0)" } - - match: { values.0.10: null } + - match: { values.0.10: "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)" } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } @@ -205,7 +205,7 @@ unsupported: - match: { values.0.21: null } - match: { values.0.22: null } - match: { values.0.23: null } - - match: { values.0.24: null } + - match: { values.0.24: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } - match: { values.0.25: 12 } - match: { values.0.26: xy } - match: { values.0.27: "foo bar" } @@ -238,7 +238,7 @@ unsupported: - match: { columns.9.name: geo_point_alias } - match: { columns.9.type: geo_point } - match: { columns.10.name: geo_shape } - - match: { columns.10.type: unsupported } + - match: { columns.10.type: geo_shape } - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - match: { columns.12.name: integer_range } @@ -266,7 +266,7 @@ unsupported: - match: { columns.23.name: search_as_you_type._index_prefix } - match: { columns.23.type: unsupported } - match: { columns.24.name: shape } - - match: { columns.24.type: unsupported } + - match: { columns.24.type: cartesian_shape } - match: { columns.25.name: some_doc.bar } - match: { columns.25.type: long } - match: { columns.26.name: 
some_doc.foo } @@ -282,8 +282,8 @@ unsupported: - do: esql.query: body: - query: 'from test | keep shape | limit 0' - - match: { columns.0.name: shape } + query: 'from test | keep histogram | limit 0' + - match: { columns.0.name: histogram } - match: { columns.0.type: unsupported } - length: { values: 0 } @@ -322,7 +322,7 @@ unsupported with sort: - match: { columns.9.name: geo_point_alias } - match: { columns.9.type: geo_point } - match: { columns.10.name: geo_shape } - - match: { columns.10.type: unsupported } + - match: { columns.10.type: geo_shape } - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - match: { columns.12.name: integer_range } @@ -350,7 +350,7 @@ unsupported with sort: - match: { columns.23.name: search_as_you_type._index_prefix } - match: { columns.23.type: unsupported } - match: { columns.24.name: shape } - - match: { columns.24.type: unsupported } + - match: { columns.24.type: cartesian_shape } - match: { columns.25.name: some_doc.bar } - match: { columns.25.type: long } - match: { columns.26.name: some_doc.foo } @@ -371,7 +371,7 @@ unsupported with sort: - match: { values.0.7: null } - match: { values.0.8: "POINT (10.0 12.0)" } - match: { values.0.9: "POINT (10.0 12.0)" } - - match: { values.0.10: null } + - match: { values.0.10: "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)" } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } @@ -385,7 +385,7 @@ unsupported with sort: - match: { values.0.21: null } - match: { values.0.22: null } - match: { values.0.23: null } - - match: { values.0.24: null } + - match: { values.0.24: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } - match: { values.0.25: 12 } - match: { values.0.26: xy } - match: { values.0.27: "foo bar" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml index 1673453824584..2c3c919f2b37d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml @@ -66,6 +66,12 @@ setup: - { "index": { } } - { "name": "Denise", "city_id": "sgn" } +--- +teardown: + - do: + enrich.delete_policy: + name: cities_policy + --- "Basic": - do: @@ -129,8 +135,3 @@ setup: - match: { values.1: [ "Bob", "nyc", "USA" ] } - match: { values.2: [ "Denise", "sgn", null ] } - match: { values.3: [ "Mario", "rom", "Italy" ] } - - - do: - enrich.delete_policy: - name: cities_policy - - is_true: acknowledged diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 0d49f169fc4b2..70a4e0da11b99 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -73,6 +73,13 @@ setup: - { "@timestamp": "2023-06-22", "ip": "10.101.0.107", "message": "network disconnected" } - { "index": { } } - { "@timestamp": "2023-06-24", "ip": "13.101.0.114", "message": "authentication failed" } + +--- +teardown: + - do: + enrich.delete_policy: + name: networks-policy + --- "IP strings": @@ -97,8 +104,3 @@ setup: - match: { values.1: [ [ "10.100.0.21", "10.101.0.107" ], [ "Production", "QA" ], [ "OPS","Engineering" ], "sending messages" ] } - match: { values.2: [ "10.101.0.107" , "QA", "Engineering", "network disconnected" ] } - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } - - - do: - enrich.delete_policy: - name: networks-policy - - is_true: acknowledged diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml index 5f1112197f383..ea5ecd0511596 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml @@ -68,3 +68,223 @@ - do: enrich.delete_policy: name: departments-policy + +--- +movies: + # This is a subset of a demo that we use for ESQL's ENRICH command. + - skip: + version: " - 8.12.99" + reason: "there's a bug in sub-fields in 8.12" + - do: + indices.create: + index: elastiflix-movies + body: + mappings: + properties: + adult: + type: boolean + backdrop_path: + type: keyword + belongs_to_collection: + properties: + backdrop_path: + type: text + fields: + keyword: + type: keyword + ignore_above: 256 + id: + type: long + name: + type: text + fields: + keyword: + type: keyword + ignore_above: 256 + poster_path: + type: text + fields: + keyword: + type: keyword + ignore_above: 256 + budget: + type: long + cast: + type: keyword + genres: + type: keyword + homepage: + type: keyword + id: + type: long + imdb_id: + type: keyword + keywords: + properties: + id: + type: long + name: + type: text + fields: + keyword: + type: keyword + ignore_above: 256 + original_language: + type: text + original_title: + type: text + overview: + type: text + popularity: + type: long + poster_path: + type: keyword + production_companies: + type: keyword + production_countries: + type: keyword + rating: + type: keyword + release_date: + type: date + revenue: + type: long + runtime: + type: long + spoken_languages: + type: keyword + status: + type: keyword + tagline: + type: text + title: + type: text + fields: + completion: + type: completion + analyzer: simple + preserve_separators: true + preserve_position_increments: true + max_input_length: 50 + user_score: + type: long + video: + type: boolean + vote_average: + type: long + vote_count: + type: long + settings: + index: + 
number_of_shards: 1 + number_of_replicas: 1 + + - do: + bulk: + index: elastiflix-movies + refresh: true + body: + # This test data comes from https://developer.themoviedb.org/docs/faq#what-are-the-attribution-requirements . Elasticsearch's distribution doesn't include this data. + - { "index": { } } + - { "adult": false,"backdrop_path": "/Ab8mkHmkYADjU7wQiOkia9BzGvS.jpg","belongs_to_collection": null,"budget": 19000000,"genres": [ "Animation","Family","Fantasy" ],"homepage": "http://movies.disney.com/spirited-away","id": 129,"imdb_id": "tt0245429","original_language": "ja","original_title": "千と千尋の神隠し","overview": "A young girl, Chihiro, becomes trapped in a strange new world of spirits. When her parents undergo a mysterious transformation, she must call upon the courage she never knew she had to free her family.","popularity": 90.865,"poster_path": "/39wmItIWsg5sZMyRUHLkWBcuVCM.jpg","production_companies": [ "Studio Ghibli" ],"production_countries": [ "Japan" ],"release_date": "2001-07-20","revenue": 274925095,"runtime": 125,"spoken_languages": [ "日本語" ],"status": "Released","tagline": "","title": "Spirited Away","video": false,"vote_average": 8.54,"vote_count": 13662,"rating": "PG","cast": [ "Rumi Hiiragi","Miyu Irino","Mari Natsuki","Takashi Naito","Yasuko Sawaguchi","Tatsuya Gashûin","Yumi Tamai","Yo Oizumi","Koba Hayashi","Tsunehiko Kamijô","Takehiko Ono","Ryunosuke Kamiki","Bunta Sugawara","Akio Nakamura","Ken Yasuda","Shirō Saitō","Michiko Yamamoto","Kaori Yamagata","Shigeyuki Totsugi" ],"keywords": [ { "id": 616,"name": "witch" },{ "id": 970,"name": "parent child relationship" },{ "id": 2343,"name": "magic" },{ "id": 3762,"name": "darkness" },{ "id": 4652,"name": "bath house" },{ "id": 13153,"name": "spirit" },{ "id": 33465,"name": "parallel world" },{ "id": 155582,"name": "amusement park" },{ "id": 196509,"name": "youkai" },{ "id": 207695,"name": "japanese mythology" },{ "id": 210024,"name": "anime" } ] } + - { "index": { } } + - { "adult": 
false,"backdrop_path": "/lXhgCODAbBXL5buk9yEmTpOoOgR.jpg","belongs_to_collection": { "id": 119,"name": "The Lord of the Rings Collection","poster_path": "/nSNle6UJNNuEbglNvXt67m1a1Yn.jpg","backdrop_path": "/bccR2CGTWVVSZAG0yqmy3DIvhTX.jpg" },"budget": 94000000,"genres": [ "Adventure","Fantasy","Action" ],"homepage": "http://www.lordoftherings.net","id": 122,"imdb_id": "tt0167260","original_language": "en","original_title": "The Lord of the Rings: The Return of the King","overview": "Aragorn is revealed as the heir to the ancient kings as he, Gandalf and the other members of the broken fellowship struggle to save Gondor from Sauron's forces. Meanwhile, Frodo and Sam take the ring closer to the heart of Mordor, the dark lord's realm.","popularity": 80.309,"poster_path": "/rCzpDGLbOoPwLjy3OAm5NUPOTrC.jpg","production_companies": [ "WingNut Films","New Line Cinema","The Saul Zaentz Company" ],"production_countries": [ "New Zealand","United States of America" ],"release_date": "2003-12-01","revenue": 1118888979,"runtime": 201,"spoken_languages": [ "English" ],"status": "Released","tagline": "The eye of the enemy is moving.","title": "The Lord of the Rings: The Return of the King","video": false,"vote_average": 8.471,"vote_count": 20702,"rating": "PG-13","cast": [ "Elijah Wood","Ian McKellen","Viggo Mortensen","Liv Tyler","Orlando Bloom","John Rhys-Davies","Sean Astin","Billy Boyd","Dominic Monaghan","Andy Serkis","Hugo Weaving","Ian Holm","Bernard Hill","Miranda Otto","David Wenham","Karl Urban","John Noble","Cate Blanchett","Lawrence Makoare","Paul Norell","Thomas Robins","Sarah McLeod","Sean Bean","Marton Csokas","Christopher Lee","David Aston","Sadwyn Brophy","Alistair Browning","Ian Hughes" ],"keywords": [ { "id": 603,"name": "elves" },{ "id": 604,"name": "dwarf" },{ "id": 606,"name": "orcs" },{ "id": 818,"name": "based on novel or book" },{ "id": 3301,"name": "suspicion" },{ "id": 3347,"name": "volcano" },{ "id": 3930,"name": "bravery" },{ "id": 6782,"name": 
"addiction" },{ "id": 9812,"name": "honor" },{ "id": 9920,"name": "royalty" },{ "id": 11173,"name": "troll" },{ "id": 14707,"name": "brutality" },{ "id": 162846,"name": "ghost" },{ "id": 177912,"name": "wizard" },{ "id": 207372,"name": "quest" },{ "id": 209220,"name": "live action and animation" },{ "id": 234213,"name": "sword and sorcery" } ] } + - { "index": { } } + - { "adult": false,"backdrop_path": "/vXzqV2RtJlT9WhNEM4NYWCEC6ZI.jpg","belongs_to_collection": null,"budget": 14000000,"genres": [ "Drama","Romance" ],"homepage": "http://www.brokebackmountain.com","id": 142,"imdb_id": "tt0388795","original_language": "en","original_title": "Brokeback Mountain","overview": "Two modern-day cowboys meet on a shepherding job in the summer of '63, the two share a raw and powerful summer together that turns into a lifelong relationship conflict","popularity": 26.731,"poster_path": "/fsbzfe9eLOEl5rJX04nPEr1eoAi.jpg","production_companies": [ "River Road Entertainment","Alberta Film Entertainment","Focus Features","Good Machine" ],"production_countries": [ "Canada","United States of America" ],"release_date": "2005-09-10","revenue": 178043761,"runtime": 134,"spoken_languages": [ "English" ],"status": "Released","tagline": "Love is a force of nature","title": "Brokeback Mountain","video": false,"vote_average": 7.805,"vote_count": 6003,"rating": "R","cast": [ "Heath Ledger","Jake Gyllenhaal","Randy Quaid","Michelle Williams","Anne Hathaway","Larry Reese","Valerie Planche","Marty Antonini","Linda Cardellini","Anna Faris","David Harbour","Peter McRobbie","Kate Mara","Dave Trimble","Roberta Maxwell" ],"keywords": [ { "id": 548,"name": "countryside" },{ "id": 1013,"name": "homophobia" },{ "id": 1016,"name": "wyoming, usa" },{ "id": 3010,"name": "intolerance" },{ "id": 5809,"name": "marriage crisis" },{ "id": 7879,"name": "secret love" },{ "id": 11524,"name": "in the closet" },{ "id": 13088,"name": "summer" },{ "id": 155291,"name": "cowboy" },{ "id": 158718,"name": "lgbt" },{ 
"id": 165086,"name": "star crossed lovers" },{ "id": 208992,"name": "1960s" },{ "id": 239239,"name": "closeted homosexual" },{ "id": 240305,"name": "gay love" },{ "id": 258533,"name": "gay theme" },{ "id": 264384,"name": "gay" },{ "id": 265777,"name": "gay relationship" } ] } + - { "index": { } } + - { "adult": false,"backdrop_path": "/3h1JZGDhZ8nzxdgvkxha0qBqi05.jpg","belongs_to_collection": null,"budget": 55000000,"genres": [ "Comedy","Drama","Romance" ],"homepage": "https://www.paramountmovies.com/movies/forrest-gump","id": 13,"imdb_id": "tt0109830","original_language": "en","original_title": "Forrest Gump","overview": "A man with a low IQ has accomplished great things in his life and been present during significant historic events—in each case, far exceeding what anyone imagined he could do. But despite all he has achieved, his one true love eludes him.","popularity": 56.904,"poster_path": "/arw2vcBveWOVZr6pxd9XTd1TdQa.jpg","production_companies": [ "Paramount","Wendy Finerman Productions","The Steve Tisch Company" ],"production_countries": [ "United States of America" ],"release_date": "1994-06-23","revenue": 677387716,"runtime": 142,"spoken_languages": [ "English" ],"status": "Released","tagline": "The world will never be the same once you've seen it through the eyes of Forrest Gump.","title": "Forrest Gump","video": false,"vote_average": 8.5,"vote_count": 23748,"rating": "PG-13","cast": [ "Tom Hanks","Robin Wright","Gary Sinise","Mykelti Williamson","Sally Field","Michael Conner Humphreys","Hanna Hall","Haley Joel Osment","Siobhan Fallon","Afemo Omilami","Peter Dobson","Sonny Shroyer","George Kelly","Sam Anderson","Margo Moorer","Christopher Jones","Kevin Mangan","Brett Rice","Daniel C. Striepeke","David Brisbin","Kirk Ward","Marlena Smalls","Kitty K. Green","Mark Matheisen","Al Harrington","Jed Gillin","Don Fischer","Matt Wallace","Mike Jolly","Michael Kemmerling","John Voldstad","Daniel J. 
Gillooly","Michael Burgess","Steven Griffith","Michael McFall","Michael McFall","Byron Minns","Stephen Bridgewater","John William Galt","Isabel Rose","Richard D'Alessandro","Michael Jace","Geoffrey Blake","Vanessa Roth","Dick Cavett","Tiffany Salerno","Tiffany Salerno","Joe Alaskey","Lazarus Jackson","Lazarus Jackson","Nora Dunfee","Hallie D'Amore","Michael Mattison","Charles Boswell","Timothy McNeil","Bob Penny","Greg Brown","Troy Christian","Bryan Hanna","Zach Hanner","Aaron Michael Lacey","Jacqueline Lovell","Brendan Shanahan","William Shipman","Robb Skyler","Mary Ellen Trainor" ],"keywords": [ { "id": 422,"name": "vietnam veteran" },{ "id": 458,"name": "hippie" },{ "id": 478,"name": "china" },{ "id": 521,"name": "washington dc, usa" },{ "id": 641,"name": "single parent" },{ "id": 791,"name": "mentally disabled" },{ "id": 818,"name": "based on novel or book" },{ "id": 828,"name": "waitress" },{ "id": 840,"name": "usa president" },{ "id": 895,"name": "optimism" },{ "id": 970,"name": "parent child relationship" },{ "id": 1228,"name": "1970s" },{ "id": 1646,"name": "autism" },{ "id": 1803,"name": "drug addiction" },{ "id": 2075,"name": "alabama" },{ "id": 2957,"name": "vietnam war" },{ "id": 4170,"name": "john f. 
kennedy" },{ "id": 4243,"name": "black panther party" },{ "id": 5526,"name": "ping pong" },{ "id": 5709,"name": "bus stop" },{ "id": 6054,"name": "friendship" },{ "id": 6593,"name": "stripper" },{ "id": 6733,"name": "bullying" },{ "id": 7422,"name": "moon landing" },{ "id": 8646,"name": "ronald reagan" },{ "id": 9376,"name": "richard nixon" },{ "id": 10163,"name": "cancer" },{ "id": 10229,"name": "singer" },{ "id": 10235,"name": "family relationships" },{ "id": 13095,"name": "death of lover" },{ "id": 34079,"name": "death" },{ "id": 40895,"name": "illness" },{ "id": 41372,"name": "leg brace" },{ "id": 155794,"name": "physical abuse" },{ "id": 155882,"name": "dying mother" },{ "id": 156872,"name": "ill mother" },{ "id": 157213,"name": "death of mother" },{ "id": 159289,"name": "historical event" },{ "id": 162365,"name": "military" },{ "id": 162735,"name": "disability" },{ "id": 166980,"name": "shrimping" },{ "id": 181557,"name": "anti war protest" },{ "id": 187300,"name": "low iq" },{ "id": 190532,"name": "false history" },{ "id": 191600,"name": "assassination attempt" },{ "id": 192793,"name": "vietnam flashback" },{ "id": 194413,"name": "intellectual disability" },{ "id": 208992,"name": "1960s" },{ "id": 240237,"name": "mother son relationship" },{ "id": 260132,"name": "monument valley" },{ "id": 264551,"name": "child sexual abuse" },{ "id": 272720,"name": "dying in arms" },{ "id": 278807,"name": "ptsd" },{ "id": 285587,"name": "elvis presley" },{ "id": 292050,"name": "single mom" },{ "id": 308359,"name": "disabled veteran" },{ "id": 308360,"name": "war rally" },{ "id": 308361,"name": "lyndon b. johnson" },{ "id": 308363,"name": "dick cavett" },{ "id": 308365,"name": "single dad" },{ "id": 308366,"name": "ping pong diplomacy" },{ "id": 308367,"name": "college football" },{ "id": 308404,"name": "slow-witted" },{ "id": 308417,"name": "bob hope" },{ "id": 308418,"name": "neil armstrong" },{ "id": 308419,"name": "gerald ford" },{ "id": 308420,"name": "robert f. 
kennedy" } ] } + - { "index": { } } + - { "adult": false,"backdrop_path": "/9B4198IzCI0POkUIVu5yQ2s7Pzr.jpg","belongs_to_collection": null,"budget": 5000000,"genres": [ "Drama","Comedy","Music" ],"homepage": "","id": 71,"imdb_id": "tt0249462","original_language": "en","original_title": "Billy Elliot","overview": "County Durham England 1984 the miners strike has started and the police have started coming up from Bethnal Green starting a class war with the lower classes suffering caught in the middle of the conflict is 11-year old Billy Elliot who after leaving his boxing club for the day stubbles upon a ballet class and finds out that he's naturally talented. He practices with his teacher Mrs Wilkinson for an upcoming audition in Newcastle-upon Tyne the audition is for the royal Ballet school in London.","popularity": 23.85,"poster_path": "/nOr5diUZxphmAD3li9aiILyI28F.jpg","production_companies": [ "BBC Film","Tiger Aspect Productions","StudioCanal","Arts Council of England","Working Title Films","WT2 Productions" ],"production_countries": [ "France","United Kingdom" ],"release_date": "2000-09-28","revenue": 109280263,"runtime": 110,"spoken_languages": [ "English" ],"status": "Released","tagline": "Inside every one of us is a special talent waiting to come out. 
The trick is finding it.","title": "Billy Elliot","video": false,"vote_average": 7.588,"vote_count": 3351,"rating": "PG-13","cast": [ "Jamie Bell","Julie Walters","Jean Heywood","Jamie Draven","Gary Lewis","Stephen Mangan","Patrick Malahide","Stuart Wells","Tracey Wilkinson","Zoë Bell","Nicola Blackwell","Janine Birkett","Joe Renton","Colin MacLachlan","Trevor Fox","Charlie Hardwick","Barbara Leigh-Hunt" ],"keywords": [ { "id": 246,"name": "dancing" },{ "id": 1013,"name": "homophobia" },{ "id": 1310,"name": "mentor" },{ "id": 1566,"name": "dream" },{ "id": 1647,"name": "sadness" },{ "id": 1682,"name": "workers' quarter" },{ "id": 1773,"name": "socialism" },{ "id": 2086,"name": "youngster" },{ "id": 2684,"name": "dancing class" },{ "id": 3344,"name": "letter" },{ "id": 3929,"name": "hope" },{ "id": 4112,"name": "strike" },{ "id": 4118,"name": "ballet dancer" },{ "id": 5215,"name": "northern england" },{ "id": 5290,"name": "small person" },{ "id": 5397,"name": "street riots" },{ "id": 6054,"name": "friendship" },{ "id": 10551,"name": "ballet" },{ "id": 10683,"name": "coming of age" },{ "id": 11336,"name": "young boy" },{ "id": 12544,"name": "crying" },{ "id": 33479,"name": "single father" },{ "id": 33928,"name": "audition" },{ "id": 33940,"name": "dance class" },{ "id": 34118,"name": "retrospective" },{ "id": 162845,"name": "revolt" },{ "id": 165244,"name": "ballet school" },{ "id": 169333,"name": "emotional vulnerability" },{ "id": 181052,"name": "helping children" },{ "id": 183141,"name": "smart kid" },{ "id": 193430,"name": "street kid" },{ "id": 201554,"name": "crossdressing" },{ "id": 207010,"name": "dance teacher" },{ "id": 208289,"name": "1980s" },{ "id": 222478,"name": "miners strike" },{ "id": 240119,"name": "father son relationship" },{ "id": 243262,"name": "80s throwback" },{ "id": 246028,"name": "ballet dancing" },{ "id": 264384,"name": "gay" },{ "id": 298896,"name": "late 20th century" },{ "id": 298897,"name": "teen playing kid" },{ "id": 298898,"name": 
"ya" },{ "id": 302839,"name": "generational divide" } ] } + - { "index": { } } + - { "adult": false,"backdrop_path": "/pIESejTZpHXvc6SF0LuQWpJIUD2.jpg","belongs_to_collection": null,"budget": 2000000,"genres": [ "Thriller","Crime","Drama" ],"homepage": "http://wip.warnerbros.com/paradisenow/","id": 67,"imdb_id": "tt0445620","original_language": "en","original_title": "Paradise Now","overview": "Two childhood friends are recruited for a suicide bombing in Tel Aviv.","popularity": 9.424,"poster_path": "/qWZkYa8VdcDZk8uzRB2PfhpM9IL.jpg","production_companies": [ "Lama Films","Augustus Film","Lama Productions","Lumer Films","ARTE France Cinéma","Filmstiftung Nordrhein-Westfalen","Medienboard Berlin-Brandenburg","Razor Film Produktion","Eurimages","Nederlands Fonds voor de Film","Hazazah Pictures" ],"production_countries": [ "France","Germany","Netherlands","Palestinian Territory" ],"release_date": "2005-09-07","revenue": 3395627,"runtime": 90,"spoken_languages": [ "English","العربية" ],"status": "Released","tagline": "From the most unexpected place, come a new call for peace","title": "Paradise Now","video": false,"vote_average": 6.9,"vote_count": 204,"rating": "PG-13","cast": [ "Ali Soliman","Amer Hlehel","Hiam Abbass","Ashraf Barhom","Mohamed Bustami","Lobna Azabal","Qais Nashif" ],"keywords": [ { "id": 30,"name": "individual" },{ "id": 393,"name": "civil war" },{ "id": 536,"name": "israel" },{ "id": 537,"name": "palestine" },{ "id": 539,"name": "middle east" },{ "id": 789,"name": "muslim" },{ "id": 5907,"name": "palestinian-israeli conflict" },{ "id": 6054,"name": "friendship" },{ "id": 12993,"name": "suicide mission" },{ "id": 156052,"name": "unemployment" } ] } + - { "index": { } } + - { "adult": false,"backdrop_path": "/r8pUcom5Mw8igtBpj3AHSAUvH0R.jpg","belongs_to_collection": null,"budget": 1300000,"genres": [ "Drama","Science Fiction" ],"homepage": "","id": 19,"imdb_id": "tt0017136","original_language": "de","original_title": "Metropolis","overview": "In a 
futuristic city sharply divided between the rich and the poor, the son of the city's mastermind meets a prophet who predicts the coming of a savior to mediate their differences.","popularity": 22.996,"poster_path": "/hUK9rewffKGqtXynH5SW3v9hzcu.jpg","production_companies": [ "UFA" ],"production_countries": [ "Germany" ],"release_date": "1927-02-06","revenue": 650422,"runtime": 149,"spoken_languages": [ "No Language" ],"status": "Released","tagline": "There can be no understanding between the hands and the brain unless the heart acts as mediator.","title": "Metropolis","video": false,"vote_average": 8.171,"vote_count": 2207,"rating": "NR","cast": [ "Brigitte Helm","Alfred Abel","Gustav Fröhlich","Rudolf Klein-Rogge","Fritz Rasp","Theodor Loos","Erwin Biswanger","Heinrich George","Olaf Storm","Helene Weigel","Margarete Lanner","Hanns Leo Reich","Fritz Alberti","Heinrich Gotho","Grete Berger","Curt Siodmak","Rolf von Goth","Helen von Münchofen","Georg John","Arthur Reinhardt","Henrietta Siodmak" ],"keywords": [ { "id": 312,"name": "man vs machine" },{ "id": 818,"name": "based on novel or book" },{ "id": 1001,"name": "underground world" },{ "id": 1436,"name": "inventor" },{ "id": 1761,"name": "metropolis" },{ "id": 2964,"name": "future" },{ "id": 4034,"name": "class society" },{ "id": 4565,"name": "dystopia" },{ "id": 5966,"name": "tower of babel" },{ "id": 6662,"name": "delirium" },{ "id": 10028,"name": "steampunk" },{ "id": 10125,"name": "mad scientist" },{ "id": 11222,"name": "prophet" },{ "id": 14544,"name": "robot" },{ "id": 14796,"name": "destruction" },{ "id": 154802,"name": "silent film" },{ "id": 179874,"name": "expressionism" },{ "id": 185459,"name": "seven deadly sins" },{ "id": 201028,"name": "depravity" },{ "id": 232988,"name": "mob justice" },{ "id": 234109,"name": "downtrodden" },{ "id": 234110,"name": "saviour" },{ "id": 239175,"name": "social unrest" },{ "id": 244516,"name": "german expressionism" },{ "id": 255786,"name": "mediator" },{ "id": 
285268,"name": "2020s" } ] } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: movies + body: + match: + indices: [ "elastiflix-movies" ] + match_field: "id" + enrich_fields: [ "title" ] + + - do: + enrich.execute_policy: + name: movies + + - do: + bulk: + index: esql-movie-logs-1 + refresh: true + body: + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","cloud_google_com/gke-os-distribution": "cos","topology_kubernetes_io/zone": "us-central1-c","topology_kubernetes_io/region": "us-central1","topology_gke_io/zone": "us-central1-c","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-provisioning": "standard","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","kubernetes_io/os": "linux","cloud_google_com/private-node": "false","cloud_google_com/gke-logging-variant": "DEFAULT","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "8da7ca75-372d-465d-81c7-11bcea409868","ip": "10.16.0.63","name": "python-favorite-otel-auto-f8dd869f7-nkrgr" },"namespace": "elastiflix","annotations": { },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","replicaset": { "name": "python-favorite-otel-auto-f8dd869f7" 
},"namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"deployment": { "name": "python-favorite-otel-auto" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "f8dd869f7" } },"message": "Added movie 67 to favorites for user 1","@timestamp": "2023-10-31T07:03:36.848Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","cloud_google_com/gke-os-distribution": "cos","topology_gke_io/zone": "us-central1-c","topology_kubernetes_io/zone": "us-central1-c","topology_kubernetes_io/region": "us-central1","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","cloud_google_com/gke-provisioning": "standard","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","kubernetes_io/os": "linux","cloud_google_com/gke-logging-variant": "DEFAULT","cloud_google_com/private-node": "false","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "8da7ca75-372d-465d-81c7-11bcea409868","ip": "10.16.0.63","name": "python-favorite-otel-auto-f8dd869f7-nkrgr" },"namespace": "elastiflix","annotations": { },"replicaset": { "name": "python-favorite-otel-auto-f8dd869f7" },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","namespace_labels": { "kubernetes_io/metadata_name": 
"elastiflix","name": "elastiflix" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "f8dd869f7" },"deployment": { "name": "python-favorite-otel-auto" } },"message": "Added movie 71 to favorites for user 1","@timestamp": "2023-10-31T07:03:38.013Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","cloud_google_com/gke-os-distribution": "cos","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","topology_kubernetes_io/zone": "us-central1-c","topology_kubernetes_io/region": "us-central1","topology_gke_io/zone": "us-central1-c","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-provisioning": "standard","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","kubernetes_io/os": "linux","cloud_google_com/gke-logging-variant": "DEFAULT","cloud_google_com/private-node": "false","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "fdde1a89-d405-4fe0-bd05-f79206b61ea2","ip": "10.16.0.31","name": "python-favorite-otel-auto-588746564f-pj4dl" },"namespace": "elastiflix","annotations": { },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","replicaset": { "name": "python-favorite-otel-auto-588746564f" },"namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"deployment": { 
"name": "python-favorite-otel-auto" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "588746564f" } },"message": "Added movie 142 to favorites for user 1","@timestamp": "2023-10-22T04:40:37.320Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","cloud_google_com/gke-os-distribution": "cos","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","topology_kubernetes_io/region": "us-central1","topology_gke_io/zone": "us-central1-c","topology_kubernetes_io/zone": "us-central1-c","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","cloud_google_com/gke-provisioning": "standard","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","kubernetes_io/os": "linux","cloud_google_com/gke-logging-variant": "DEFAULT","cloud_google_com/private-node": "false","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "fdde1a89-d405-4fe0-bd05-f79206b61ea2","ip": "10.16.0.31","name": "python-favorite-otel-auto-588746564f-pj4dl" },"namespace": "elastiflix","annotations": { },"replicaset": { "name": "python-favorite-otel-auto-588746564f" },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"deployment": { "name": "python-favorite-otel-auto" },"labels": { 
"app": "elastiflix","service": "favorite","pod-template-hash": "588746564f" } },"message": "Added movie 13 to favorites for user 1","@timestamp": "2023-10-22T04:40:37.902Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","cloud_google_com/gke-os-distribution": "cos","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","topology_gke_io/zone": "us-central1-c","topology_kubernetes_io/zone": "us-central1-c","topology_kubernetes_io/region": "us-central1","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","cloud_google_com/gke-provisioning": "standard","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","cloud_google_com/gke-logging-variant": "DEFAULT","cloud_google_com/private-node": "false","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "fdde1a89-d405-4fe0-bd05-f79206b61ea2","ip": "10.16.0.31","name": "python-favorite-otel-auto-588746564f-pj4dl" },"namespace": "elastiflix","annotations": { },"replicaset": { "name": "python-favorite-otel-auto-588746564f" },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "588746564f" },"deployment": { "name": 
"python-favorite-otel-auto" } },"message": "Added movie 122 to favorites for user 1","@timestamp": "2023-10-22T04:40:38.032Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","cloud_google_com/gke-os-distribution": "cos","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","topology_kubernetes_io/zone": "us-central1-c","topology_kubernetes_io/region": "us-central1","topology_gke_io/zone": "us-central1-c","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","cloud_google_com/gke-provisioning": "standard","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","kubernetes_io/os": "linux","cloud_google_com/gke-logging-variant": "DEFAULT","cloud_google_com/private-node": "false","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "fdde1a89-d405-4fe0-bd05-f79206b61ea2","ip": "10.16.0.31","name": "python-favorite-otel-auto-588746564f-pj4dl" },"namespace": "elastiflix","annotations": { },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","replicaset": { "name": "python-favorite-otel-auto-588746564f" },"namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "588746564f" },"deployment": { "name": "python-favorite-otel-auto" } },"message": "Added movie 
129 to favorites for user 1","@timestamp": "2023-10-22T04:40:38.045Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","cloud_google_com/gke-os-distribution": "cos","topology_kubernetes_io/zone": "us-central1-c","topology_gke_io/zone": "us-central1-c","topology_kubernetes_io/region": "us-central1","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","cloud_google_com/gke-provisioning": "standard","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","cloud_google_com/private-node": "false","cloud_google_com/gke-logging-variant": "DEFAULT","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "fdde1a89-d405-4fe0-bd05-f79206b61ea2","ip": "10.16.0.31","name": "python-favorite-otel-auto-588746564f-pj4dl" },"namespace": "elastiflix","annotations": { },"replicaset": { "name": "python-favorite-otel-auto-588746564f" },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "588746564f" },"deployment": { "name": "python-favorite-otel-auto" } },"message": "Added movie 143 to favorites for user 1","@timestamp": 
"2023-10-22T04:40:38.603Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","cloud_google_com/gke-os-distribution": "cos","topology_kubernetes_io/region": "us-central1","topology_kubernetes_io/zone": "us-central1-c","topology_gke_io/zone": "us-central1-c","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-provisioning": "standard","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","cloud_google_com/private-node": "false","cloud_google_com/gke-logging-variant": "DEFAULT","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "fdde1a89-d405-4fe0-bd05-f79206b61ea2","ip": "10.16.0.31","name": "python-favorite-otel-auto-588746564f-pj4dl" },"namespace": "elastiflix","annotations": { },"replicaset": { "name": "python-favorite-otel-auto-588746564f" },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"deployment": { "name": "python-favorite-otel-auto" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "588746564f" } },"message": "Added movie 19 to favorites for user 1","@timestamp": "2023-10-22T04:40:38.843Z" } + - { "index": { } } + - { 
"kubernetes": { "container": { "name": "python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","cloud_google_com/gke-os-distribution": "cos","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","topology_gke_io/zone": "us-central1-c","topology_kubernetes_io/zone": "us-central1-c","topology_kubernetes_io/region": "us-central1","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","cloud_google_com/gke-provisioning": "standard","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","cloud_google_com/private-node": "false","cloud_google_com/gke-logging-variant": "DEFAULT","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "8da7ca75-372d-465d-81c7-11bcea409868","ip": "10.16.0.63","name": "python-favorite-otel-auto-f8dd869f7-nkrgr" },"namespace": "elastiflix","annotations": { },"replicaset": { "name": "python-favorite-otel-auto-f8dd869f7" },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"deployment": { "name": "python-favorite-otel-auto" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "f8dd869f7" } },"message": "Added movie 62 to favorites for user 1","@timestamp": "2023-10-31T07:03:54.309Z" } + - { "index": { } } + - { "kubernetes": { "container": { "name": 
"python-favorite-otel-auto" },"node": { "uid": "ed11e0fd-fdbe-430b-85ec-950c949f7e5e","hostname": "gke-conf-1-default-pool-f9d21bc1-594p","name": "gke-conf-1-default-pool-f9d21bc1-594p","labels": { "cloud_google_com/machine-family": "e2","cloud_google_com/gke-nodepool": "default-pool","cloud_google_com/gke-os-distribution": "cos","kubernetes_io/hostname": "gke-conf-1-default-pool-f9d21bc1-594p","topology_kubernetes_io/region": "us-central1","topology_gke_io/zone": "us-central1-c","topology_kubernetes_io/zone": "us-central1-c","kubernetes_io/arch": "amd64","cloud_google_com/gke-cpu-scaling-level": "2","cloud_google_com/gke-provisioning": "standard","failure-domain_beta_kubernetes_io/region": "us-central1","cloud_google_com/gke-max-pods-per-node": "110","cloud_google_com/gke-container-runtime": "containerd","beta_kubernetes_io/instance-type": "e2-medium","failure-domain_beta_kubernetes_io/zone": "us-central1-c","node_kubernetes_io/instance-type": "e2-medium","cloud_google_com/gke-stack-type": "IPV4","beta_kubernetes_io/os": "linux","cloud_google_com/gke-boot-disk": "pd-balanced","kubernetes_io/os": "linux","cloud_google_com/gke-logging-variant": "DEFAULT","cloud_google_com/private-node": "false","beta_kubernetes_io/arch": "amd64" } },"pod": { "uid": "8da7ca75-372d-465d-81c7-11bcea409868","ip": "10.16.0.63","name": "python-favorite-otel-auto-f8dd869f7-nkrgr" },"namespace": "elastiflix","annotations": { },"namespace_uid": "97330530-b6f3-45a8-8317-be8fbf6ef6be","replicaset": { "name": "python-favorite-otel-auto-f8dd869f7" },"namespace_labels": { "kubernetes_io/metadata_name": "elastiflix","name": "elastiflix" },"labels": { "app": "elastiflix","service": "favorite","pod-template-hash": "f8dd869f7" },"deployment": { "name": "python-favorite-otel-auto" } },"message": "Added movie 102 to favorites for user 1","@timestamp": "2023-10-31T07:04:07.412Z" } + + - do: + esql.query: + body: + query: | + FROM esql-movie-logs-* | + WHERE kubernetes.pod.name LIKE "*python*" | + WHERE 
message LIKE "Added*" | + KEEP @timestamp, message | + GROK message "%{NUMBER:id}" | + STATS total = count(id) BY id | + ENRICH movies ON id | + SORT total DESC, title ASC | + KEEP total, title | + LIMIT 10 + + - match: { columns.0.name: "total" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "title" } + - match: { columns.1.type: "text" } + + - length: { values: 10 } + - match: { values.0.0: 1 } + - match: { values.0.1: Billy Elliot } + - match: { values.1.0: 1 } + - match: { values.1.1: Brokeback Mountain } + - match: { values.2.0: 1 } + - match: { values.2.1: Forrest Gump } + - match: { values.3.0: 1 } + - match: { values.3.1: Metropolis } + - match: { values.4.0: 1 } + - match: { values.4.1: Paradise Now } + - match: { values.5.0: 1 } + - match: { values.5.1: Spirited Away } + - match: { values.6.0: 1 } + - match: { values.6.1: "The Lord of the Rings: The Return of the King" } + - match: { values.7.0: 1 } + - is_false: values.7.1 + - match: { values.8.0: 1 } + - is_false: values.8.1 + - match: { values.9.0: 1 } + - is_false: values.9.1 + + - do: + enrich.delete_policy: + name: movies diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml index c6124e7f75e96..80f15b9cb7414 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml @@ -59,7 +59,11 @@ setup: type: long index: false doc_values: false - + text: + type: text + text_noidx: + type: text + index: false - do: bulk: @@ -83,7 +87,9 @@ setup: "date": "2021-04-28T18:50:04.467Z", "date_noidx": "2021-04-28T18:50:04.467Z", "ip": "192.168.0.1", - "ip_noidx": "192.168.0.1" + "ip_noidx": "192.168.0.1", + "text": "bar", + "text_noidx": "bar" } --- @@ -96,6 +102,7 @@ fetch: body: query: 'from test' + - length: { columns: 18 } - match: { 
columns.0.name: boolean } - match: { columns.0.type: boolean } - match: { columns.1.name: boolean_noidx } @@ -128,9 +135,12 @@ fetch: - match: { columns.14.type: long } - match: { columns.15.name: long_noidx } - match: { columns.15.type: long } + - match: { columns.16.name: text } + - match: { columns.16.type: text } + - match: { columns.17.name: text_noidx } + - match: { columns.17.type: text } - length: { values: 1 } - - match: { values.0.0: true } - match: { values.0.1: true } - match: { values.0.2: "2021-04-28T18:50:04.467Z" } @@ -147,3 +157,5 @@ fetch: - match: { values.0.13: "foo" } - match: { values.0.14: 20 } - match: { values.0.15: 20 } + - match: { values.0.16: "bar" } + - match: { values.0.17: "bar" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml index 818dad0114eef..fdccf473b358a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml @@ -218,10 +218,6 @@ setup: --- "Test start deployment fails while model download in progress": - - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/103153" - - do: ml.put_trained_model: model_id: .elser_model_2 @@ -231,10 +227,13 @@ setup: "field_names": ["text_field"] } } + # Set a low timeout so the test doesn't actually wait + # for the model download to complete - do: catch: /Model download task is currently running\. 
Wait for trained model \[.elser_model_2\] download task to complete then try again/ ml.start_trained_model_deployment: model_id: .elser_model_2 + timeout: 1s - do: ml.delete_trained_model: model_id: .elser_model_2 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml index e2e220aa55456..319b84e855aaf 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml @@ -15,5 +15,5 @@ setup: # This is fragile - it needs to be updated every time we add a new cluster/index privilege # I would much prefer we could just check that specific entries are in the array, but we don't have # an assertion for that - - length: { "cluster" : 53 } + - length: { "cluster" : 55 } - length: { "index" : 22 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 5adbf782f3236..abb0d038cb2c3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -1,10 +1,8 @@ --- setup: - skip: - version: all - reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/104038 - # version: " - 8.12.99" - # reason: "Universal Profiling test infrastructure is available in 8.12+" + version: " - 8.12.99" + reason: "Universal Profiling test infrastructure is available in 8.12+" - do: cluster.put_settings: @@ -15,10 +13,11 @@ setup: - do: profiling.status: wait_for_resources_created: true + timeout: "1m" - do: bulk: - refresh: true + refresh: wait_for body: - {"create": {"_index": "profiling-events-all"}} - {"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": 
["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1700504427"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["S07KmaoGhvNte78xwwRbZQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"]} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/text_structure/test_grok_pattern.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/text_structure/test_grok_pattern.yml new file mode 100644 index 0000000000000..411cebb529abf --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/text_structure/test_grok_pattern.yml @@ -0,0 +1,43 @@ +"Grok pattern with two named fields": + - do: + text_structure.test_grok_pattern: + body: + grok_pattern: "%{WORD}.*%{WORD:first_name} %{WORD:last_name}!" + text: + - "Hello Dave Roberts!" + - "this does not match" + - match: + matches: + - matched: true + fields: + first_name: + - match: Dave + offset: 8 + length: 4 + last_name: + - match: Roberts + offset: 13 + length: 7 + - matched: false +--- +"Grok pattern with two identically named fields": + - do: + text_structure.test_grok_pattern: + ecs_compatibility: v1 + body: + grok_pattern: "%{WORD}.*%{WORD:name} %{WORD:name}!" + text: + - "Hello Dave Roberts!" 
+ - "this does not match" + - match: + matches: + - matched: true + fields: + name: + - match: Dave + offset: 8 + length: 4 + - match: Roberts + offset: 13 + length: 7 + - matched: false diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml index 2d3cfa764e98c..e10852b4c93d6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml @@ -240,6 +240,21 @@ setup: "deduce_mappings": false } } + - match: { preview.0.airline: foo } + - match: { preview.0.by-hour: "2017-02-18T00:00:00.000Z" } + - match: { preview.0.avg_response: 1.0 } + - match: { preview.0.time.max: "2017-02-18T00:30:00.000Z" } + - match: { preview.0.time.min: "2017-02-18T00:00:00.000Z" } + - match: { preview.1.airline: bar } + - match: { preview.1.by-hour: "2017-02-18T01:00:00.000Z" } + - match: { preview.1.avg_response: 42.0 } + - match: { preview.1.time.max: "2017-02-18T01:00:00.000Z" } + - match: { preview.1.time.min: "2017-02-18T01:00:00.000Z" } + - match: { preview.2.airline: foo } + - match: { preview.2.by-hour: "2017-02-18T01:00:00.000Z" } + - match: { preview.2.avg_response: 42.0 } + - match: { preview.2.time.max: "2017-02-18T01:01:00.000Z" } + - match: { preview.2.time.min: "2017-02-18T01:01:00.000Z" } - match: { generated_dest_index.mappings.properties: {} } --- diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java index 3ba82e6c4513b..b7d9117a3f9dc 100644 --- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java +++ 
b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/TextStructurePlugin.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -20,8 +21,11 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.xpack.core.textstructure.action.FindStructureAction; +import org.elasticsearch.xpack.core.textstructure.action.TestGrokPatternAction; import org.elasticsearch.xpack.textstructure.rest.RestFindStructureAction; +import org.elasticsearch.xpack.textstructure.rest.RestTestGrokPatternAction; import org.elasticsearch.xpack.textstructure.transport.TransportFindStructureAction; +import org.elasticsearch.xpack.textstructure.transport.TransportTestGrokPatternAction; import java.util.Arrays; import java.util.List; @@ -38,6 +42,7 @@ public class TextStructurePlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -45,12 +50,14 @@ public List getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster ) { - return Arrays.asList(new RestFindStructureAction()); + return Arrays.asList(new RestFindStructureAction(), new RestTestGrokPatternAction()); } @Override public List> getActions() { - return Arrays.asList(new ActionHandler<>(FindStructureAction.INSTANCE, TransportFindStructureAction.class)); + return Arrays.asList( + new ActionHandler<>(FindStructureAction.INSTANCE, 
TransportFindStructureAction.class), + new ActionHandler<>(TestGrokPatternAction.INSTANCE, TransportTestGrokPatternAction.class) + ); } - } diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestTestGrokPatternAction.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestTestGrokPatternAction.java new file mode 100644 index 0000000000000..c13534b6f90c2 --- /dev/null +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestTestGrokPatternAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.textstructure.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.textstructure.action.TestGrokPatternAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.textstructure.TextStructurePlugin.BASE_PATH; + +@ServerlessScope(Scope.INTERNAL) +public class RestTestGrokPatternAction extends BaseRestHandler { + + @Override + public String getName() { + return "text_structure_test_grok_pattern_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, BASE_PATH + "test_grok_pattern"), new Route(POST, BASE_PATH + "test_grok_pattern")); + } + + @Override + protected 
RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + TestGrokPatternAction.Request request; + String ecsCompatibility = restRequest.param(TestGrokPatternAction.Request.ECS_COMPATIBILITY.getPreferredName()); + try (XContentParser parser = restRequest.contentOrSourceParamParser()) { + request = TestGrokPatternAction.Request.parseRequest(ecsCompatibility, parser); + } + + return channel -> client.execute(TestGrokPatternAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/transport/TransportTestGrokPatternAction.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/transport/TransportTestGrokPatternAction.java new file mode 100644 index 0000000000000..f8ce7a1099952 --- /dev/null +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/transport/TransportTestGrokPatternAction.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.textstructure.transport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.grok.Grok; +import org.elasticsearch.grok.GrokBuiltinPatterns; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.Transports; +import org.elasticsearch.xpack.core.textstructure.action.TestGrokPatternAction; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.grok.GrokBuiltinPatterns.ECS_COMPATIBILITY_V1; + +public class TransportTestGrokPatternAction extends TransportAction { + + private static final Logger logger = LogManager.getLogger(TransportTestGrokPatternAction.class); + + private final ThreadPool threadPool; + + @Inject + public TransportTestGrokPatternAction(TransportService transportService, ActionFilters actionFilters, ThreadPool threadPool) { + super(TestGrokPatternAction.INSTANCE.name(), actionFilters, transportService.getTaskManager()); + this.threadPool = threadPool; + } + + @Override + protected void doExecute(Task task, TestGrokPatternAction.Request request, ActionListener listener) { + // As matching a regular expression might take a while, we run in a different thread to avoid blocking the network thread. 
+ threadPool.generic().execute(ActionRunnable.supply(listener, () -> getResponse(request))); + } + + private TestGrokPatternAction.Response getResponse(TestGrokPatternAction.Request request) { + assert Transports.assertNotTransportThread("matching regexes is too expensive for a network thread"); + boolean ecsCompatibility = ECS_COMPATIBILITY_V1.equals(request.getEcsCompatibility()); + Grok grok = new Grok(GrokBuiltinPatterns.get(ecsCompatibility), request.getGrokPattern(), logger::debug); + List> ranges = new ArrayList<>(); + for (String text : request.getText()) { + ranges.add(grok.captureRanges(text)); + } + return new TestGrokPatternAction.Response(ranges); + } +} diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index 8f129789d46b7..13470e3c2e4ec 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -10,7 +10,6 @@ apply plugin: 'elasticsearch.rest-resources' dependencies { testImplementation project(':x-pack:qa') - testImplementation project(':client:rest-high-level') } Version ccsCompatVersion = new Version(VersionProperties.getElasticsearchVersion().getMajor(), VersionProperties.getElasticsearchVersion().getMinor() - 1, 0) @@ -54,29 +53,17 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' - maybeDisableForFips(it) } tasks.register('mixed-cluster', RestIntegTestTask) { dependsOn 'remote-cluster' useCluster remoteCluster systemProperty 'tests.rest.suite', 'multi_cluster' - maybeDisableForFips(it) } tasks.register("integTest") { dependsOn 'mixed-cluster' - maybeDisableForFips(it) } tasks.named("check").configure { dependsOn("integTest") } -//TODO: remove with version 8.14. A new FIPS setting was added in 8.13. 
Since FIPS configures all test clusters and this specific integTest uses -// the previous minor version, that setting is not available when running in FIPS until 8.14. -def maybeDisableForFips(task) { - if (BuildParams.inFipsJvm) { - if(Version.fromString(project.version).before(Version.fromString('8.14.0'))) { - task.enabled = false - } - } -} diff --git a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle index c7df54992e06d..aab0d16e54f5c 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle +++ b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle @@ -3,7 +3,6 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: xpackModule('transform')) - javaRestTestImplementation project(":client:rest-high-level") } // location for keys and certificates diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java index 9ba4082ed14e1..60bb04fe6cb8a 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java @@ -58,7 +58,6 @@ public void cleanup() throws Exception { } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100596") public void testTransformFeatureReset() throws Exception { String indexName = "basic-crud-reviews"; String transformId = "batch-transform-feature-reset"; @@ -90,8 +89,13 @@ public void testTransformFeatureReset() throws Exception { .build(); 
putTransform(continuousTransformId, Strings.toString(config), RequestOptions.DEFAULT); + + // Sleep for a few seconds so that we cover transform being stopped at various stages. + Thread.sleep(randomLongBetween(0, 5_000)); + startTransform(continuousTransformId, RequestOptions.DEFAULT); - client().performRequest(new Request(HttpPost.METHOD_NAME, "/_features/_reset")); + + assertOK(client().performRequest(new Request(HttpPost.METHOD_NAME, "/_features/_reset"))); Response response = adminClient().performRequest(new Request("GET", "/_cluster/state?metric=metadata")); Map metadata = (Map) ESRestTestCase.entityAsMap(response).get("metadata"); @@ -118,5 +122,4 @@ public void testTransformFeatureReset() throws Exception { Map transformIndices = ESRestTestCase.entityAsMap(adminClient().performRequest(new Request("GET", ".transform-*"))); assertThat("Indices were: " + transformIndices, transformIndices, is(anEmptyMap())); } - } diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java new file mode 100644 index 0000000000000..1fb1b3ac0bc5c --- /dev/null +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java @@ -0,0 +1,223 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; +import org.elasticsearch.threadpool.TestThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class TransformChainIT extends TransformRestTestCase { + + private static final String SET_INGEST_TIME_PIPELINE = "set_ingest_time"; + private static final String TRANSFORM_CONFIG_TEMPLATE = """ + { + "source": { + "index": "%s" + }, + "dest": { + "index": "%s", + "pipeline": "%s" + }, + "sync": { + "time": { + "field": "event.ingested", + "delay": "10s" + } + }, + "frequency": "%s", + "pivot": { + "group_by": { + "timestamp": { + "date_histogram": { + "field": "timestamp", + "fixed_interval": "%s" + } + }, + "user_id": { + "terms": { + "field": "user_id" + } + } + }, + "aggregations": { + "stars": { + "sum": { + "field": "stars" + } + } + } + }, + "settings": { + "unattended": true, + "deduce_mappings": %s, + "use_point_in_time": %s + } + }"""; + + private TestThreadPool threadPool; + + @Before + public void setupTransformTests() throws IOException { + threadPool = new TestThreadPool(getTestName()); + + // Create destination index template. It will be used by all the transforms in this test. 
+ Request createIndexTemplateRequest = new Request("PUT", "_template/test_dest_index_template"); + createIndexTemplateRequest.setJsonEntity(""" + { + "index_patterns": [ "my-transform-*-dest" ], + "mappings": { + "properties": { + "timestamp": { + "type": "date" + }, + "user_id": { + "type": "keyword" + }, + "stars": { + "type": "integer" + } + } + } + }"""); + createIndexTemplateRequest.setOptions(expectWarnings(RestPutIndexTemplateAction.DEPRECATION_WARNING)); + assertAcknowledged(client().performRequest(createIndexTemplateRequest)); + + // Create ingest pipeline which sets event.ingested field. This is needed for transform's synchronisation to work correctly. + Request putIngestPipelineRequest = new Request("PUT", "_ingest/pipeline/" + SET_INGEST_TIME_PIPELINE); + putIngestPipelineRequest.setJsonEntity(""" + { + "description": "Set ingest timestamp.", + "processors": [ + { + "set": { + "field": "event.ingested", + "value": "{{{_ingest.timestamp}}}" + } + } + ] + }"""); + assertOK(client().performRequest(putIngestPipelineRequest)); + + // Set logging levels for debugging. 
+ Request settingsRequest = new Request("PUT", "/_cluster/settings"); + settingsRequest.setJsonEntity(""" + { + "persistent": { + "logger.org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer": "debug", + "logger.org.elasticsearch.xpack.transform": "debug", + "logger.org.elasticsearch.xpack.transform.notifications": "debug", + "logger.org.elasticsearch.xpack.transform.transforms": "debug" + } + }"""); + assertOK(client().performRequest(settingsRequest)); + } + + @After + public void shutdownThreadPool() { + if (threadPool != null) { + threadPool.shutdown(); + } + } + + public void testTwoChainedTransforms() throws Exception { + testChainedTransforms(2); + } + + public void testThreeChainedTransforms() throws Exception { + testChainedTransforms(3); + } + + private void testChainedTransforms(final int numTransforms) throws Exception { + final String reviewsIndexName = "reviews"; + final int numDocs = 100; + final Instant now = Instant.now(); + createReviewsIndex( + reviewsIndexName, + numDocs, + 100, + TransformIT::getUserIdForRow, + row -> Instant.ofEpochMilli(now.toEpochMilli() - 1000 * numDocs + 1000 * row).toString(), + SET_INGEST_TIME_PIPELINE + ); + + List transformIds = new ArrayList<>(numTransforms); + // Create the chain of transforms. Previous transform's destination index becomes next transform's source index. + String transformIdPrefix = "my-transform-" + randomAlphaOfLength(4).toLowerCase(Locale.ROOT) + "-" + numTransforms + "-"; + for (int i = 0; i < numTransforms; ++i) { + String transformId = transformIdPrefix + i; + transformIds.add(transformId); + // Set up the transform so that its source index is the destination index of the previous transform in the chain. + // The number of documents is expected to be the same in all the indices. + String sourceIndex = i == 0 ? 
reviewsIndexName : transformIds.get(i - 1) + "-dest"; + String destIndex = transformId + "-dest"; + assertFalse(indexExists(destIndex)); + + assertAcknowledged(putTransform(transformId, createTransformConfig(sourceIndex, destIndex), true, RequestOptions.DEFAULT)); + } + + List transformIdsShuffled = new ArrayList<>(transformIds); + Collections.shuffle(transformIdsShuffled, random()); + // Start all the transforms in random order so that sometimes the transform later in the chain needs to wait for its source index + // to become available. + for (String transformId : transformIdsShuffled) { + startTransform(transformId, RequestOptions.DEFAULT); + } + + // Give the transforms some time to finish processing. Since the transforms are continuous, we cannot wait for them to be STOPPED. + assertBusy(() -> { + // Verify that each transform processed an expected number of documents. + for (String transformId : transformIds) { + Map stats = getTransformStats(transformId); + assertThat( + "Stats were: " + stats, + XContentMapValues.extractValue(stats, "stats", "documents_processed"), + is(equalTo(numDocs)) + ); + } + }, 60, TimeUnit.SECONDS); + + // Stop all the transforms. + for (String transformId : transformIds) { + stopTransform(transformId); + } + // Delete all the transforms. 
+ for (String transformId : transformIds) { + deleteTransform(transformId); + } + } + + private static String createTransformConfig(String sourceIndex, String destIndex) { + return Strings.format( + TRANSFORM_CONFIG_TEMPLATE, + sourceIndex, + destIndex, + SET_INGEST_TIME_PIPELINE, + "1s", + "1s", + randomBoolean(), + randomBoolean() + ); + } +} diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index 93615efdfba2b..1302f20838c4a 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -47,7 +47,6 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.oneOf; -@SuppressWarnings("removal") public class TransformIT extends TransformRestTestCase { private static final int NUM_USERS = 28; diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index f154b13b32add..9c4241fa88ef5 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -164,15 +164,13 @@ protected void stopTransform(String id, boolean waitForCompletion, @Nullable Tim if (timeout != null) { stopTransformRequest.addParameter(TransformField.TIMEOUT.getPreferredName(), 
timeout.getStringRep()); } - Map stopTransformResponse = entityAsMap(client().performRequest(stopTransformRequest)); - assertThat(stopTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertAcknowledged(client().performRequest(stopTransformRequest)); } protected void startTransform(String id, RequestOptions options) throws IOException { Request startTransformRequest = new Request("POST", TRANSFORM_ENDPOINT + id + "/_start"); startTransformRequest.setOptions(options); - Map startTransformResponse = entityAsMap(client().performRequest(startTransformRequest)); - assertThat(startTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertAcknowledged(client().performRequest(startTransformRequest)); } // workaround for https://github.com/elastic/elasticsearch/issues/62204 @@ -221,16 +219,24 @@ protected void deleteTransform(String id, boolean force) throws IOException { assertOK(adminClient().performRequest(request)); } - protected void putTransform(String id, String config, RequestOptions options) throws IOException { + protected Response putTransform(String id, String config, RequestOptions options) throws IOException { + return putTransform(id, config, false, options); + } + + protected Response putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { if (createdTransformIds.contains(id)) { throw new IllegalArgumentException("transform [" + id + "] is already registered"); } - Request put = new Request("PUT", TRANSFORM_ENDPOINT + id); - put.setJsonEntity(config); - put.setOptions(options); - assertOK(client().performRequest(put)); + Request request = new Request("PUT", TRANSFORM_ENDPOINT + id); + request.setJsonEntity(config); + if (deferValidation) { + request.addParameter("defer_validation", "true"); + } + request.setOptions(options); + Response response = assertOK(client().performRequest(request)); createdTransformIds.add(id); + return response; } protected Map previewTransform(String 
transformConfig, RequestOptions options) throws IOException { @@ -396,7 +402,14 @@ protected TransformConfig.Builder createTransformConfigBuilder( } protected void updateConfig(String id, String update, RequestOptions options) throws Exception { + updateConfig(id, update, false, options); + } + + protected void updateConfig(String id, String update, boolean deferValidation, RequestOptions options) throws Exception { Request updateRequest = new Request("POST", "_transform/" + id + "/_update"); + if (deferValidation) { + updateRequest.addParameter("defer_validation", String.valueOf(deferValidation)); + } updateRequest.setJsonEntity(update); updateRequest.setOptions(options); assertOK(client().performRequest(updateRequest)); @@ -408,6 +421,17 @@ protected void createReviewsIndex( int numUsers, Function userIdProvider, Function dateStringProvider + ) throws Exception { + createReviewsIndex(indexName, numDocs, numUsers, userIdProvider, dateStringProvider, null); + } + + protected void createReviewsIndex( + String indexName, + int numDocs, + int numUsers, + Function userIdProvider, + Function dateStringProvider, + String defaultPipeline ) throws Exception { assert numUsers > 0; @@ -448,6 +472,9 @@ protected void createReviewsIndex( .endObject() .endObject() .endObject(); + if (defaultPipeline != null) { + builder.startObject("settings").field("index.default_pipeline", defaultPipeline).endObject(); + } } builder.endObject(); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java index 7a047b14ec83e..e27d6a224802c 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java +++ 
b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java @@ -43,7 +43,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -@SuppressWarnings("removal") public class TransformUsingSearchRuntimeFieldsIT extends TransformRestTestCase { private static final String REVIEWS_INDEX_NAME = "basic-crud-reviews"; diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/ContinuousTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/ContinuousTestCase.java index e68885e927b96..7b83a16ff723d 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/ContinuousTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/ContinuousTestCase.java @@ -39,7 +39,6 @@ import static java.time.temporal.ChronoField.NANO_OF_SECOND; import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; -@SuppressWarnings("removal") public abstract class ContinuousTestCase extends ESRestTestCase { public static final TimeValue SYNC_DELAY = new TimeValue(1, TimeUnit.SECONDS); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java index cd4b2004b02f7..5eac2bd2ebdf6 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java +++ 
b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java @@ -83,7 +83,7 @@ */ public class TransformContinuousIT extends TransformRestTestCase { - public static final int MAX_WAIT_TIME_ONE_ITERATION_SECONDS = 60; + public static final int MAX_WAIT_TIME_ONE_ITERATION_SECONDS = 120; private List transformTestCases = new ArrayList<>(); @Before @@ -136,7 +136,6 @@ public void removePipelines() throws IOException { deletePipeline(ContinuousTestCase.INGEST_PIPELINE); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97263") public void testContinuousEvents() throws Exception { String sourceIndexName = ContinuousTestCase.CONTINUOUS_EVENTS_SOURCE_INDEX; DecimalFormat numberFormat = new DecimalFormat("000", new DecimalFormatSymbols(Locale.ROOT)); diff --git a/x-pack/plugin/transform/qa/single-node-tests/build.gradle b/x-pack/plugin/transform/qa/single-node-tests/build.gradle index c321b3aeb0289..d4f84ecb37c9c 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/transform/qa/single-node-tests/build.gradle @@ -4,7 +4,6 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: xpackModule('transform')) - javaRestTestImplementation project(":client:rest-high-level") } testClusters.configureEach { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java index 5d46c9933f48a..29576231d848c 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java +++ 
b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java @@ -182,7 +182,7 @@ private void testTransformDestIndexMappings(String transformId, boolean deduceMa } } }""", destIndex); - Request createIndexTemplateRequest = new Request("PUT", "_template/test_dest_index_no_deduce_template"); + Request createIndexTemplateRequest = new Request("PUT", "_template/test_dest_index_mappings_template"); createIndexTemplateRequest.setJsonEntity(destIndexTemplate); createIndexTemplateRequest.setOptions(expectWarnings(RestPutIndexTemplateAction.DEPRECATION_WARNING)); Map createIndexTemplateResponse = entityAsMap(client().performRequest(createIndexTemplateRequest)); @@ -261,6 +261,9 @@ private void testTransformDestIndexMappings(String transformId, boolean deduceMa ) ) ); + Map searchResult = getAsMap(destIndex + "/_search?q=reviewer:user_0"); + String timestamp = (String) ((List) XContentMapValues.extractValue("hits.hits._source.timestamp", searchResult)).get(0); + assertThat(timestamp, is(equalTo("2017-01-10T10:10:10.000Z"))); } private static void assertAliases(String index, String... 
aliases) throws IOException { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java index 4321306870bd1..37c3d774e59e6 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java @@ -35,7 +35,6 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; -@SuppressWarnings("removal") public class TransformGetAndGetStatsIT extends TransformRestTestCase { private static final String TEST_USER_NAME = "transform_user"; @@ -159,7 +158,6 @@ public void testGetAndGetStats() throws Exception { stopTransform("pivot_continuous", false); } - @SuppressWarnings("unchecked") public void testGetAndGetStatsForTransformWithoutConfig() throws Exception { createPivotReviewsTransform("pivot_1", "pivot_reviews_1", null); createPivotReviewsTransform("pivot_2", "pivot_reviews_2", null); @@ -219,7 +217,6 @@ public void testGetAndGetStatsForTransformWithoutConfig() throws Exception { stopTransform("pivot_continuous", true); } - @SuppressWarnings("unchecked") public void testGetAndGetStatsWhenTransformInternalIndexDisappears() throws Exception { createPivotReviewsTransform("pivot_1", "pivot_reviews_1", null); createPivotReviewsTransform("pivot_2", "pivot_reviews_2", null); @@ -327,7 +324,6 @@ private List> verifyGetStatsResponse(String path, int expect return transformsStats; } - @SuppressWarnings("unchecked") public void testGetPersistedStatsWithoutTask() throws Exception { createPivotReviewsTransform("pivot_stats_1", "pivot_reviews_stats_1", null); 
startAndWaitForTransform("pivot_stats_1", "pivot_reviews_stats_1"); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformLatestRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformLatestRestIT.java index 9224b838b058b..af5fe9f180a4e 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformLatestRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformLatestRestIT.java @@ -147,4 +147,117 @@ public void testLatestWithAggregateMetricDoubleAsUniqueKey() throws Exception { ); } } + + public void testContinuousLatestWithFrom_NoDocs() throws Exception { + testContinuousLatestWithFrom("latest_from_no_docs", "reviews_from_no_docs", "2017-02-20", 0); + } + + public void testContinuousLatestWithFrom_OneDoc() throws Exception { + testContinuousLatestWithFrom("latest_from_one_doc", "reviews_from_one_doc", "2017-02-10", 1); + } + + public void testContinuousLatestWithFrom_AllDocs_FromNull() throws Exception { + testContinuousLatestWithFrom("latest_from_all_docs_from_null", "reviews_from_all_docs_from_null", null, 28); + } + + public void testContinuousLatestWithFrom_AllDocs() throws Exception { + testContinuousLatestWithFrom("latest_from_all_docs", "reviews_from_all_docs", "2017-01-01", 28); + } + + private void testContinuousLatestWithFrom(String transformId, String indexName, String from, int expectedDestNumDocs) throws Exception { + createReviewsIndex(indexName); + String transformIndex = transformId + "-dest"; + setupDataAccessRole(DATA_ACCESS_ROLE, indexName, transformIndex); + Request createTransformRequest = createRequestWithAuth( + "PUT", + getTransformEndpoint() + transformId, + BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS + ); + String 
config = Strings.format(""" + { + "source": { + "index": "%s" + }, + "dest": { + "index": "%s" + }, + "frequency": "1s", + "sync": { + "time": { + "field": "timestamp", + "delay": "1s" + } + }, + "latest": { + "unique_key": [ "user_id" ], + "sort": "timestamp" + } + }""", indexName, transformIndex); + createTransformRequest.setJsonEntity(config); + Map createTransformResponse = entityAsMap(client().performRequest(createTransformRequest)); + assertThat(createTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + assertSourceIndexContents(indexName, 1000, "2017-01-10T10:10:10.000Z", "2017-01-30T22:34:38.000Z"); + + { + StringBuilder bulk = new StringBuilder(); + bulk.append(Strings.format(""" + {"index":{"_index":"%s"}} + {"user_id":"user_%s","business_id":"business_%s","stars":%s,"location":"%s","timestamp":%s} + """, indexName, 666, 777, 7, 888, "\"2017-02-15\"")); + bulk.append("\r\n"); + + Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.addParameter("refresh", "true"); + bulkRequest.setJsonEntity(bulk.toString()); + Map bulkResponse = entityAsMap(client().performRequest(bulkRequest)); + assertThat(bulkResponse.get("errors"), equalTo(Boolean.FALSE)); + } + + assertSourceIndexContents(indexName, 1001, "2017-01-10T10:10:10.000Z", "2017-02-15T00:00:00.000Z"); + + startAndWaitForContinuousTransform(transformId, transformIndex, null, from, 1L); + assertTrue(indexExists(transformIndex)); + + Map transformIndexStats = getAsMap(transformIndex + "/_stats"); + assertThat( + "Stats were: " + transformIndexStats, + XContentMapValues.extractValue("_all.total.docs.count", transformIndexStats), + is(equalTo(expectedDestNumDocs)) + ); + + stopTransform(transformId, false); + deleteIndex(indexName); + } + + private void assertSourceIndexContents(String indexName, int expectedNumDocs, String expectedMinTimestamp, String expectedMaxTimestamp) + throws IOException { + Request searchRequest = new Request("GET", indexName + "/_search"); + 
searchRequest.setJsonEntity(""" + { + "size": 0, + "aggregations": { + "min_timestamp": { + "min": { + "field": "timestamp" + } + }, + "max_timestamp": { + "max": { + "field": "timestamp" + } + } + } + }"""); + Map searchResponse = entityAsMap(client().performRequest(searchRequest)); + assertThat(XContentMapValues.extractValue("hits.total.value", searchResponse), is(equalTo(expectedNumDocs))); + assertThat( + XContentMapValues.extractValue("aggregations.min_timestamp.value_as_string", searchResponse), + is(equalTo(expectedMinTimestamp)) + ); + assertThat( + XContentMapValues.extractValue("aggregations.max_timestamp.value_as_string", searchResponse), + is(equalTo(expectedMaxTimestamp)) + ); + } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java index 925e6d5381770..6aeca79b4aa17 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java @@ -21,10 +21,6 @@ import java.io.IOException; import java.time.Instant; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -63,7 +59,7 @@ protected boolean preserveIndicesUponCompletion() { @Before public void createIndexes() throws IOException { setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME); - setupUser(TEST_USER_NAME, Arrays.asList("transform_admin", DATA_ACCESS_ROLE)); + setupUser(TEST_USER_NAME, List.of("transform_admin", DATA_ACCESS_ROLE)); // it's not possible to run it as @BeforeClass as clients aren't initialized 
then, so we need this little hack if (indicesCreated) { @@ -894,6 +890,15 @@ public void testPivotWithTermsAgg() throws Exception { } } }, + "common_users_desc": { + "terms": { + "field": "user_id", + "size": 3, + "order": { + "_key": "desc" + } + } + }, "rare_users": { "rare_terms": { "field": "user_id" @@ -922,33 +927,32 @@ public void testPivotWithTermsAgg() throws Exception { "hits.hits._source.common_users", searchResult )).get(0); + assertThat(commonUsers, is(not(nullValue()))); + assertThat( + commonUsers, + equalTo( + Map.of( + "user_10", + Map.of("common_businesses", Map.of("business_12", 6, "business_9", 4)), + "user_0", + Map.of("common_businesses", Map.of("business_0", 35)) + ) + ) + ); + Map commonUsersDesc = (Map) ((List) XContentMapValues.extractValue( + "hits.hits._source.common_users_desc", + searchResult + )).get(0); + assertThat(commonUsersDesc, is(not(nullValue()))); + // 3 user names latest in lexicographic order (user_7, user_8, user_9) are selected properly but their order is not preserved. + // See https://github.com/elastic/elasticsearch/issues/104847 for more information. 
+ assertThat(commonUsersDesc, equalTo(Map.of("user_7", 6, "user_9", 2, "user_8", 8))); Map rareUsers = (Map) ((List) XContentMapValues.extractValue( "hits.hits._source.rare_users", searchResult )).get(0); - assertThat(commonUsers, is(not(nullValue()))); - assertThat(commonUsers, equalTo(new HashMap<>() { - { - put("user_10", Collections.singletonMap("common_businesses", new HashMap<>() { - { - put("business_12", 6); - put("business_9", 4); - } - })); - put("user_0", Collections.singletonMap("common_businesses", new HashMap<>() { - { - put("business_0", 35); - } - })); - } - })); assertThat(rareUsers, is(not(nullValue()))); - assertThat(rareUsers, equalTo(new HashMap<>() { - { - put("user_5", 1); - put("user_12", 1); - } - })); + assertThat(rareUsers, is(equalTo(Map.of("user_5", 1, "user_12", 1)))); } private void assertDateHistogramPivot(String indexName) throws Exception { @@ -1184,8 +1188,8 @@ private void testPreviewTransform(String queryJson) throws Exception { List> preview = (List>) previewTransformResponse.get("preview"); // preview is limited to 100 assertThat(preview.size(), equalTo(100)); - Set expectedTopLevelFields = new HashSet<>(Arrays.asList("user", "by_day")); - Set expectedNestedFields = new HashSet<>(Arrays.asList("id", "avg_rating")); + Set expectedTopLevelFields = Set.of("user", "by_day"); + Set expectedNestedFields = Set.of("id", "avg_rating"); preview.forEach(p -> { Set keys = p.keySet(); assertThat(keys, equalTo(expectedTopLevelFields)); @@ -1255,8 +1259,8 @@ public void testPreviewTransformWithPipeline() throws Exception { List> preview = (List>) previewTransformResponse.get("preview"); // preview is limited to 100 assertThat(preview.size(), equalTo(100)); - Set expectedTopLevelFields = new HashSet<>(Arrays.asList("user", "by_day", "pipeline_field")); - Set expectedNestedFields = new HashSet<>(Arrays.asList("id", "avg_rating")); + Set expectedTopLevelFields = Set.of("user", "by_day", "pipeline_field"); + Set expectedNestedFields = 
Set.of("id", "avg_rating"); preview.forEach(p -> { Set keys = p.keySet(); assertThat(keys, equalTo(expectedTopLevelFields)); @@ -1383,8 +1387,11 @@ private List previewWithOffset(String offset) throws IOException { } } } + }, + "settings": { + "deduce_mappings": %s } - }""", REVIEWS_INDEX_NAME, offset); + }""", REVIEWS_INDEX_NAME, offset, randomBoolean()); createPreviewRequest.setJsonEntity(config); Map previewTransformResponse = entityAsMap(client().performRequest(createPreviewRequest)); diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java index dde0698056ab2..77a0cc7f79841 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineConfig; @@ -223,15 +224,18 @@ private void testSearchAction(QueryBuilder query, boolean ccsMinimizeRoundtrips, } public void testGetCheckpointAction_MatchAllQuery() throws InterruptedException { + final var threadContext = client().threadPool().getThreadContext(); testGetCheckpointAction( - client(), + threadContext, + CheckpointClient.local(client()), null, new String[] { "local_*" }, QueryBuilders.matchAllQuery(), Set.of("local_old_index", "local_new_index") ); testGetCheckpointAction( - client().getRemoteClusterClient(REMOTE_CLUSTER, 
EsExecutors.DIRECT_EXECUTOR_SERVICE), + threadContext, + CheckpointClient.remote(client().getRemoteClusterClient(REMOTE_CLUSTER, EsExecutors.DIRECT_EXECUTOR_SERVICE)), REMOTE_CLUSTER, new String[] { "remote_*" }, QueryBuilders.matchAllQuery(), @@ -240,15 +244,18 @@ public void testGetCheckpointAction_MatchAllQuery() throws InterruptedException } public void testGetCheckpointAction_RangeQuery() throws InterruptedException { + final var threadContext = client().threadPool().getThreadContext(); testGetCheckpointAction( - client(), + threadContext, + CheckpointClient.local(client()), null, new String[] { "local_*" }, QueryBuilders.rangeQuery("@timestamp").from(timestamp), Set.of("local_new_index") ); testGetCheckpointAction( - client().getRemoteClusterClient(REMOTE_CLUSTER, EsExecutors.DIRECT_EXECUTOR_SERVICE), + threadContext, + CheckpointClient.remote(client().getRemoteClusterClient(REMOTE_CLUSTER, EsExecutors.DIRECT_EXECUTOR_SERVICE)), REMOTE_CLUSTER, new String[] { "remote_*" }, QueryBuilders.rangeQuery("@timestamp").from(timestamp), @@ -257,15 +264,18 @@ public void testGetCheckpointAction_RangeQuery() throws InterruptedException { } public void testGetCheckpointAction_RangeQueryThatMatchesNoShards() throws InterruptedException { + final var threadContext = client().threadPool().getThreadContext(); testGetCheckpointAction( - client(), + threadContext, + CheckpointClient.local(client()), null, new String[] { "local_*" }, QueryBuilders.rangeQuery("@timestamp").from(100_000_000), Set.of() ); testGetCheckpointAction( - client().getRemoteClusterClient(REMOTE_CLUSTER, EsExecutors.DIRECT_EXECUTOR_SERVICE), + threadContext, + CheckpointClient.remote(client().getRemoteClusterClient(REMOTE_CLUSTER, EsExecutors.DIRECT_EXECUTOR_SERVICE)), REMOTE_CLUSTER, new String[] { "remote_*" }, QueryBuilders.rangeQuery("@timestamp").from(100_000_000), @@ -273,8 +283,14 @@ public void testGetCheckpointAction_RangeQueryThatMatchesNoShards() throws Inter ); } - private void 
testGetCheckpointAction(Client client, String cluster, String[] indices, QueryBuilder query, Set expectedIndices) - throws InterruptedException { + private void testGetCheckpointAction( + ThreadContext threadContext, + CheckpointClient client, + String cluster, + String[] indices, + QueryBuilder query, + Set expectedIndices + ) throws InterruptedException { final GetCheckpointAction.Request request = new GetCheckpointAction.Request( indices, IndicesOptions.LENIENT_EXPAND_OPEN, @@ -286,19 +302,13 @@ private void testGetCheckpointAction(Client client, String cluster, String[] ind CountDownLatch latch = new CountDownLatch(1); SetOnce finalResponse = new SetOnce<>(); SetOnce finalException = new SetOnce<>(); - ClientHelper.executeAsyncWithOrigin( - client, - TRANSFORM_ORIGIN, - GetCheckpointAction.INSTANCE, - request, - ActionListener.wrap(response -> { - finalResponse.set(response); - latch.countDown(); - }, e -> { - finalException.set(e); - latch.countDown(); - }) - ); + ClientHelper.executeAsyncWithOrigin(threadContext, TRANSFORM_ORIGIN, request, ActionListener.wrap(response -> { + finalResponse.set(response); + latch.countDown(); + }, e -> { + finalException.set(e); + latch.countDown(); + }), client::getCheckpoint); latch.await(10, TimeUnit.SECONDS); assertThat(finalException.get(), is(nullValue())); diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java index b952869a34d88..acb77ce1db4b4 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java @@ -137,6 +137,30 @@ public void testGetCheckpointWithQueryThatFiltersOutEverything() throws 
Exceptio assertThat("Response was: " + response.getCheckpoints(), response.getCheckpoints(), is(anEmptyMap())); } + public void testGetCheckpointWithMissingIndex() throws Exception { + GetCheckpointAction.Request request = new GetCheckpointAction.Request( + new String[] { "test_index_missing" }, + IndicesOptions.LENIENT_EXPAND_OPEN, + null, + null, + TimeValue.timeValueSeconds(5) + ); + + GetCheckpointAction.Response response = client().execute(GetCheckpointAction.INSTANCE, request).get(); + assertThat("Response was: " + response.getCheckpoints(), response.getCheckpoints(), is(anEmptyMap())); + + request = new GetCheckpointAction.Request( + new String[] { "test_index_missing-*" }, + IndicesOptions.LENIENT_EXPAND_OPEN, + null, + null, + TimeValue.timeValueSeconds(5) + ); + + response = client().execute(GetCheckpointAction.INSTANCE, request).get(); + assertThat("Response was: " + response.getCheckpoints(), response.getCheckpoints(), is(anEmptyMap())); + } + public void testGetCheckpointTimeoutExceeded() throws Exception { final String indexNamePrefix = "test_index-"; final int indices = 100; diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformProgressIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformProgressIT.java index 327f44175574f..c62ff49ae6865 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformProgressIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformProgressIT.java @@ -44,7 +44,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -@SuppressWarnings("removal") public class TransformProgressIT extends TransformSingleNodeTestCase { private static final String REVIEWS_INDEX_NAME = "reviews"; diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 98c95c5a9803a..cb118cead1dc9 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -166,6 +167,7 @@ protected XPackLicenseState getLicenseState() { @Override public List getRestHandlers( final Settings unused, + NamedWriteableRegistry namedWriteableRegistry, final RestController restController, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, @@ -402,7 +404,7 @@ public void cleanUpFeature( SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener); }, unsetResetModeListener::onFailure); - ActionListener afterStoppingTransforms = ActionListener.wrap(stopTransformsResponse -> { + ActionListener afterForceStoppingTransforms = ActionListener.wrap(stopTransformsResponse -> { if (stopTransformsResponse.isAcknowledged() && stopTransformsResponse.getTaskFailures().isEmpty() && stopTransformsResponse.getNodeFailures().isEmpty()) { @@ -437,12 +439,31 @@ public void cleanUpFeature( } }, unsetResetModeListener::onFailure); + ActionListener afterStoppingTransforms = ActionListener.wrap( + afterForceStoppingTransforms::onResponse, + e -> { + logger.info("Error while trying to stop the transforms, will try again with force=true", e); + StopTransformAction.Request 
forceStopTransformsRequest = new StopTransformAction.Request( + Metadata.ALL, + true, + // Set force=true to make sure all the transforms persistent tasks are stopped. + true, + null, + true, + false + ); + client.execute(StopTransformAction.INSTANCE, forceStopTransformsRequest, afterForceStoppingTransforms); + } + ); + ActionListener afterResetModeSet = ActionListener.wrap(response -> { StopTransformAction.Request stopTransformsRequest = new StopTransformAction.Request( Metadata.ALL, true, - true, - null, + // Set force=false in order to let transforms finish gracefully. + false, + // Do not give it too much time. If there is a problem, there will be another try with force=true. + TimeValue.timeValueSeconds(10), true, false ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index 2f3ed29ea08fc..d4e03475af22e 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator; import org.elasticsearch.tasks.Task; @@ -180,7 +180,7 @@ protected void masterOperation( * @param aggs aggs returned by the search * @return feature usage map */ - private static Map getFeatureCounts(Aggregations aggs) { + private static Map getFeatureCounts(InternalAggregations 
aggs) { Filters filters = aggs.get(FEATURE_COUNTS); return filters.getBuckets().stream().collect(toMap(Filters.Bucket::getKeyAsString, Filters.Bucket::getDocCount)); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java index 98777b47543cb..935ff04c47d85 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java @@ -299,7 +299,7 @@ private static void updateTransformConfiguration( TransformAuditor auditor, IndexNameExpressionResolver indexNameExpressionResolver, TransformConfig config, - Map mappings, + Map destIndexMappings, SeqNoPrimaryTermAndIndex seqNoPrimaryTermAndIndex, ClusterState clusterState, Settings destIndexSettings, @@ -355,7 +355,7 @@ private static void updateTransformConfiguration( clusterState, config, destIndexSettings, - mappings, + destIndexMappings, createDestinationListener ); } else { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index 8e0a935ffaa53..4eded1aa0b5a6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.DestAlias; +import 
org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; import org.elasticsearch.xpack.core.transform.transforms.SyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; @@ -65,6 +66,7 @@ import java.util.Map; import java.util.stream.Collectors; +import static java.util.Collections.emptyMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.DUMMY_DEST_INDEX_FOR_PREVIEW; import static org.elasticsearch.xpack.transform.utils.SecondaryAuthorizationUtils.getSecurityHeadersPreferringSecondary; @@ -153,6 +155,7 @@ protected void doExecute(Task task, Request request, ActionListener li config.getDestination().getIndex(), config.getDestination().getAliases(), config.getSyncConfig(), + config.getSettings(), listener ), listener::onFailure @@ -208,6 +211,7 @@ private void getPreview( String dest, List aliases, SyncConfig syncConfig, + SettingsConfig settingsConfig, ActionListener listener ) { Client parentTaskClient = new ParentTaskAssigningClient(client, parentTaskId); @@ -285,12 +289,17 @@ private void getPreview( }, listener::onFailure); ActionListener> deduceMappingsListener = ActionListener.wrap(deducedMappings -> { - mappings.set(deducedMappings); + if (Boolean.FALSE.equals(settingsConfig.getDeduceMappings())) { + mappings.set(emptyMap()); + } else { + mappings.set(deducedMappings); + } function.preview( parentTaskClient, timeout, filteredHeaders, source, + // Use deduced mappings for generating preview even if "settings.deduce_mappings" is set to false deducedMappings, NUMBER_OF_PREVIEW_BUCKETS, previewListener diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index 
4c86aed335ac1..2c9fc8ffce5bf 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -109,7 +109,7 @@ protected void masterOperation(Task task, Request request, ClusterState clusterS // <3> Create the transform ActionListener validateTransformListener = ActionListener.wrap( - validationResponse -> putTransform(request, listener), + unusedValidationResponse -> putTransform(request, listener), listener::onFailure ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/CheckpointClient.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/CheckpointClient.java new file mode 100644 index 0000000000000..6a65bd637f914 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/CheckpointClient.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.checkpoint; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.get.GetIndexAction; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.client.internal.RemoteClusterClient; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; + +/** + * Adapter interface so that the actions needed for {@link DefaultCheckpointProvider} can execute in the same way when using either an + * {@link ElasticsearchClient} (for local-cluster requests) and a {@link RemoteClusterClient} (for remote-cluster requests). + */ +interface CheckpointClient { + + /** + * Execute {@link GetIndexAction}. + */ + void getIndex(GetIndexRequest request, ActionListener listener); + + /** + * Execute {@link IndicesStatsAction}. + */ + void getIndicesStats(IndicesStatsRequest request, ActionListener listener); + + /** + * Execute {@link GetCheckpointAction}. + */ + void getCheckpoint(GetCheckpointAction.Request request, ActionListener listener); + + /** + * Construct a {@link CheckpointClient} which executes its requests on the local cluster. 
+ */ + static CheckpointClient local(ElasticsearchClient client) { + return new CheckpointClient() { + @Override + public void getIndex(GetIndexRequest request, ActionListener listener) { + client.execute(GetIndexAction.INSTANCE, request, listener); + } + + @Override + public void getIndicesStats(IndicesStatsRequest request, ActionListener listener) { + client.execute(IndicesStatsAction.INSTANCE, request, listener); + } + + @Override + public void getCheckpoint(GetCheckpointAction.Request request, ActionListener listener) { + client.execute(GetCheckpointAction.INSTANCE, request, listener); + } + }; + } + + /** + * Construct a {@link CheckpointClient} which executes its requests on a remote cluster. + */ + static CheckpointClient remote(RemoteClusterClient client) { + return new CheckpointClient() { + @Override + public void getIndex(GetIndexRequest request, ActionListener listener) { + client.execute(GetIndexAction.REMOTE_TYPE, request, listener); + } + + @Override + public void getIndicesStats(IndicesStatsRequest request, ActionListener listener) { + client.execute(IndicesStatsAction.REMOTE_TYPE, request, listener); + } + + @Override + public void getCheckpoint(GetCheckpointAction.Request request, ActionListener listener) { + client.execute(GetCheckpointAction.REMOTE_TYPE, request, listener); + } + }; + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java index b9b7d9d8477cb..f60429f954b78 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java @@ -11,15 +11,14 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; @@ -129,9 +128,12 @@ protected void getIndexCheckpoints(TimeValue timeout, ActionListener(resolvedIndexes.numClusters(), mergeMapsListener); } + final var threadContext = client.threadPool().getThreadContext(); + if (resolvedIndexes.getLocalIndices().isEmpty() == false) { getCheckpointsFromOneCluster( - client, + threadContext, + CheckpointClient.local(client), timeout, transformConfig.getHeaders(), resolvedIndexes.getLocalIndices().toArray(new String[0]), @@ -143,12 +145,9 @@ protected void getIndexCheckpoints(TimeValue timeout, ActionListener> remoteIndex : resolvedIndexes.getRemoteIndicesPerClusterAlias().entrySet()) { String cluster = remoteIndex.getKey(); - ParentTaskAssigningClient remoteClient = new ParentTaskAssigningClient( - client.getRemoteClusterClient(cluster, EsExecutors.DIRECT_EXECUTOR_SERVICE), - client.getParentTask() - ); getCheckpointsFromOneCluster( - remoteClient, + threadContext, + CheckpointClient.remote(client.getRemoteClusterClient(cluster, EsExecutors.DIRECT_EXECUTOR_SERVICE)), timeout, transformConfig.getHeaders(), remoteIndex.getValue().toArray(new String[0]), @@ -163,7 +162,8 @@ protected void getIndexCheckpoints(TimeValue 
timeout, ActionListener headers, String[] indices, @@ -172,36 +172,46 @@ private void getCheckpointsFromOneCluster( ActionListener> listener ) { if (fallbackToBWC.contains(cluster)) { - getCheckpointsFromOneClusterBWC(client, timeout, headers, indices, cluster, listener); + getCheckpointsFromOneClusterBWC(threadContext, client, timeout, headers, indices, cluster, listener); } else { - getCheckpointsFromOneClusterV2(client, timeout, headers, indices, query, cluster, ActionListener.wrap(response -> { - logger.debug( - "[{}] Successfully retrieved checkpoints from cluster [{}] using transform checkpoint API", - transformConfig.getId(), - cluster - ); - listener.onResponse(response); - }, e -> { - Throwable unwrappedException = ExceptionsHelper.unwrapCause(e); - if (unwrappedException instanceof ActionNotFoundTransportException) { - // this is an implementation detail, so not necessary to audit or warn, but only report as debug + getCheckpointsFromOneClusterV2( + threadContext, + client, + timeout, + headers, + indices, + query, + cluster, + ActionListener.wrap(response -> { logger.debug( - "[{}] Cluster [{}] does not support transform checkpoint API, falling back to legacy checkpointing", + "[{}] Successfully retrieved checkpoints from cluster [{}] using transform checkpoint API", transformConfig.getId(), cluster ); - - fallbackToBWC.add(cluster); - getCheckpointsFromOneClusterBWC(client, timeout, headers, indices, cluster, listener); - } else { - listener.onFailure(e); - } - })); + listener.onResponse(response); + }, e -> { + Throwable unwrappedException = ExceptionsHelper.unwrapCause(e); + if (unwrappedException instanceof ActionNotFoundTransportException) { + // this is an implementation detail, so not necessary to audit or warn, but only report as debug + logger.debug( + "[{}] Cluster [{}] does not support transform checkpoint API, falling back to legacy checkpointing", + transformConfig.getId(), + cluster + ); + + fallbackToBWC.add(cluster); + 
getCheckpointsFromOneClusterBWC(threadContext, client, timeout, headers, indices, cluster, listener); + } else { + listener.onFailure(e); + } + }) + ); } } private static void getCheckpointsFromOneClusterV2( - ParentTaskAssigningClient client, + ThreadContext threadContext, + CheckpointClient client, TimeValue timeout, Map headers, String[] indices, @@ -240,12 +250,12 @@ private static void getCheckpointsFromOneClusterV2( } ClientHelper.executeWithHeadersAsync( + threadContext, headers, ClientHelper.TRANSFORM_ORIGIN, - client, - GetCheckpointAction.INSTANCE, getCheckpointRequest, - checkpointListener + checkpointListener, + client::getCheckpoint ); } @@ -253,7 +263,8 @@ private static void getCheckpointsFromOneClusterV2( * BWC fallback for nodes/cluster older than 8.2 */ private static void getCheckpointsFromOneClusterBWC( - ParentTaskAssigningClient client, + ThreadContext threadContext, + CheckpointClient client, TimeValue timeout, Map headers, String[] indices, @@ -266,10 +277,9 @@ private static void getCheckpointsFromOneClusterBWC( .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); ClientHelper.executeWithHeadersAsync( + threadContext, headers, ClientHelper.TRANSFORM_ORIGIN, - client, - GetIndexAction.INSTANCE, getIndexRequest, ActionListener.wrap(getIndexResponse -> { Set userIndices = getIndexResponse.getIndices() != null @@ -277,9 +287,8 @@ private static void getCheckpointsFromOneClusterBWC( : Collections.emptySet(); // 2nd get stats request ClientHelper.executeAsyncWithOrigin( - client, + threadContext, ClientHelper.TRANSFORM_ORIGIN, - IndicesStatsAction.INSTANCE, new IndicesStatsRequest().indices(indices).timeout(timeout).clear().indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN), ActionListener.wrap(response -> { if (response.getFailedShards() != 0) { @@ -305,9 +314,11 @@ private static void getCheckpointsFromOneClusterBWC( return; } listener.onResponse(extractIndexCheckPoints(response.getShards(), userIndices, cluster)); - }, e -> 
listener.onFailure(new CheckpointException("Failed to create checkpoint", e))) + }, e -> listener.onFailure(new CheckpointException("Failed to create checkpoint", e))), + client::getIndicesStats ); - }, e -> listener.onFailure(new CheckpointException("Failed to create checkpoint", e))) + }, e -> listener.onFailure(new CheckpointException("Failed to create checkpoint", e))), + client::getIndex ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 4fa19450a900c..6da7b6190bd45 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.transform.notifications; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.client.internal.Client; @@ -30,6 +32,8 @@ */ public class TransformAuditor extends AbstractAuditor { + private static final Logger logger = LogManager.getLogger(TransformAuditor.class); + private volatile boolean isResetMode = false; private final boolean includeNodeInfo; @@ -45,12 +49,12 @@ public TransformAuditor(Client client, String nodeName, ClusterService clusterSe ComposableIndexTemplate.builder() .template(TransformInternalIndex.getAuditIndexTemplate()) .version((long) TransformConfigVersion.CURRENT.id()) - .indexPatterns(Collections.singletonList(TransformInternalIndexConstants.AUDIT_INDEX_PREFIX + "*")) + .indexPatterns(Collections.singletonList(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN)) .priority(Long.MAX_VALUE) .build() ); } 
catch (IOException e) { - throw new ElasticsearchException("Failure creating transform notification index", e); + throw new ElasticsearchException("Failure creating transform notification index template request", e); } }, nodeName, @@ -59,7 +63,12 @@ public TransformAuditor(Client client, String nodeName, ClusterService clusterSe ); clusterService.addListener(event -> { if (event.metadataChanged()) { - isResetMode = TransformMetadata.getTransformMetadata(event.state()).isResetMode(); + boolean oldIsResetMode = isResetMode; + boolean newIsResetMode = TransformMetadata.getTransformMetadata(event.state()).isResetMode(); + if (oldIsResetMode != newIsResetMode) { + logger.debug("TransformAuditor has noticed change of isResetMode bit from {} to {}", oldIsResetMode, newIsResetMode); + } + isResetMode = newIsResetMode; } }); this.includeNodeInfo = includeNodeInfo; diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index 843dee43706f8..1d44ed5a1f8ef 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; @@ -27,6 +26,7 @@ import org.elasticsearch.action.search.TransportSearchAction; import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -848,7 +848,7 @@ public void refresh(ActionListener listener) { client.threadPool().getThreadContext(), TRANSFORM_ORIGIN, new RefreshRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME), - ActionListener.wrap(r -> listener.onResponse(true), listener::onFailure), + ActionListener.wrap(r -> listener.onResponse(true), listener::onFailure), client.admin().indices()::refresh ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java index a08612fa4be72..fe3d4ede898bc 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java @@ -40,6 +40,7 @@ import java.util.Map; import java.util.Set; +import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toMap; @@ -138,7 +139,7 @@ public static void createDestinationIndex( if (dest.length == 0) { TransformDestIndexSettings generatedDestIndexSettings = createTransformDestIndexSettings( destIndexSettings, - destIndexMappings, + Boolean.FALSE.equals(config.getSettings().getDeduceMappings()) ? 
emptyMap() : destIndexMappings, config.getId(), Clock.systemUTC() ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index 1b8d14c6cdc2f..55f0290c20a1c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -288,13 +288,13 @@ void doGetFieldMappings(ActionListener> fieldMappingsListene SchemaUtil.getDestinationFieldMappings(client, getConfig().getDestination().getIndex(), fieldMappingsListener); } - void validate(ActionListener listener) { + void validate(ActionListener listener) { ClientHelper.executeAsyncWithOrigin( client, ClientHelper.TRANSFORM_ORIGIN, ValidateTransformAction.INSTANCE, new ValidateTransformAction.Request(transformConfig, false, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT), - ActionListener.wrap(response -> listener.onResponse(null), listener::onFailure) + listener ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 4128ae42f53e3..f6d4ae2d53c9a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -31,6 +32,7 @@ import 
org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; import org.elasticsearch.xpack.core.transform.TransformMessages; +import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; @@ -58,6 +60,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; +import static java.util.Collections.emptyMap; import static org.elasticsearch.core.Strings.format; public abstract class TransformIndexer extends AsyncTwoPhaseIndexer { @@ -174,7 +177,7 @@ public TransformIndexer( abstract void persistState(TransformState state, ActionListener listener); - abstract void validate(ActionListener listener); + abstract void validate(ActionListener listener); @Override protected String getJobId() { @@ -265,6 +268,8 @@ protected void onStart(long now, ActionListener listener) { return; } + SetOnce> deducedDestIndexMappings = new SetOnce<>(); + ActionListener finalListener = ActionListener.wrap(r -> { try { // if we haven't set the page size yet, if it is set we might have reduced it after running into an out of memory @@ -326,8 +331,14 @@ protected void onStart(long now, ActionListener listener) { } }, listener::onFailure); - ActionListener> fieldMappingsListener = ActionListener.wrap(mappings -> { - this.fieldMappings = mappings; + ActionListener> fieldMappingsListener = ActionListener.wrap(destIndexMappings -> { + if (destIndexMappings.isEmpty() == false) { + // If we managed to fetch destination index mappings, we use them from now on ... + this.fieldMappings = destIndexMappings; + } else { + // ... 
otherwise we fall back to index mappings deduced based on source indices + this.fieldMappings = deducedDestIndexMappings.get(); + } configurationReadyListener.onResponse(null); }, listener::onFailure); @@ -338,7 +349,8 @@ protected void onStart(long now, ActionListener listener) { }, listener::onFailure); // If we are continuous, we will want to verify we have the latest stored configuration - ActionListener changedSourceListener = ActionListener.wrap(r -> { + ActionListener changedSourceListener = ActionListener.wrap(validationResponse -> { + deducedDestIndexMappings.set(validationResponse.getDestIndexMappings()); if (isContinuous()) { transformsConfigManager.getTransformConfiguration(getJobId(), ActionListener.wrap(config -> { if (transformConfig.equals(config) && fieldMappings != null) { @@ -377,7 +389,7 @@ protected void onStart(long now, ActionListener listener) { if (hasChanged) { context.setChangesLastDetectedAt(instantOfTrigger); logger.debug("[{}] source has changed, triggering new indexer run.", getJobId()); - changedSourceListener.onResponse(null); + changedSourceListener.onResponse(new ValidateTransformAction.Response(emptyMap())); } else { logger.trace("[{}] source has not changed, finish indexer early.", getJobId()); // No changes, stop executing @@ -396,7 +408,7 @@ protected void onStart(long now, ActionListener listener) { hasSourceChanged = true; context.setLastSearchTime(instantOfTrigger); context.setChangesLastDetectedAt(instantOfTrigger); - changedSourceListener.onResponse(null); + changedSourceListener.onResponse(new ValidateTransformAction.Response(emptyMap())); } } @@ -1165,7 +1177,7 @@ protected boolean shouldAuditOnFinish(long completedCheckpoint) { } private RunState determineRunStateAtStart() { - if (context.from() != null) { + if (context.from() != null && changeCollector != null && changeCollector.queryForChanges()) { return RunState.IDENTIFY_CHANGES; } diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java index 189fb26e1f969..3412be813dcf6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java @@ -20,7 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -81,7 +81,7 @@ public void preview( buildSearchRequest(sourceConfig, timeout, numberOfBuckets), ActionListener.wrap(r -> { try { - final Aggregations aggregations = r.getAggregations(); + final InternalAggregations aggregations = r.getAggregations(); if (aggregations == null) { listener.onFailure( new ElasticsearchStatusException("Source indices have been deleted or closed.", RestStatus.BAD_REQUEST) @@ -158,7 +158,7 @@ public Tuple, Map> processSearchResponse( TransformIndexerStats stats, TransformProgress progress ) { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); // Treat this as a "we reached the end". // This should only happen when all underlying indices have gone away. Consequently, there is no more data to read. 
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index 0636555459632..684e3a085405d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -22,7 +22,7 @@ import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; @@ -92,7 +92,7 @@ interface FieldCollector { * * @return true if this collection is done and there are no more changes to look for */ - boolean collectChangesFromAggregations(Aggregations aggregations); + boolean collectChangesFromAggregations(InternalAggregations aggregations); /** * Return a composite value source builder if the collector requires it. 
@@ -248,7 +248,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { return true; } @@ -314,7 +314,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { return true; } @@ -401,7 +401,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { final SingleValue lowerBoundResult = aggregations.get(minAggregationOutputName); final SingleValue upperBoundResult = aggregations.get(maxAggregationOutputName); @@ -510,7 +510,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { final SingleValue lowerBoundResult = aggregations.get(minAggregationOutputName); final SingleValue upperBoundResult = aggregations.get(maxAggregationOutputName); @@ -659,7 +659,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { return true; } @@ -743,7 +743,7 @@ public Collection getIndicesToQuery(TransformCheckpoint lastCheckpoint, @Override public Map processSearchResponse(final SearchResponse searchResponse) { - final Aggregations aggregations = searchResponse.getAggregations(); + final InternalAggregations aggregations = searchResponse.getAggregations(); if (aggregations == null) { return null; } diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java index 6094a455578c6..e0e4bc05adbe2 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java @@ -43,7 +43,6 @@ import java.util.Set; import java.util.stream.Stream; -import static java.util.Collections.emptyMap; import static java.util.stream.Collectors.toList; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -93,10 +92,6 @@ public void deduceMappings( SourceConfig sourceConfig, final ActionListener> listener ) { - if (Boolean.FALSE.equals(settings.getDeduceMappings())) { - listener.onResponse(emptyMap()); - return; - } SchemaUtil.deduceMappings( client, headers, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformInfoTransportActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformInfoTransportActionTests.java index d2c23f768cef6..e70c947b83c69 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformInfoTransportActionTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformInfoTransportActionTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import 
org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; import org.elasticsearch.transport.TransportService; @@ -42,7 +42,7 @@ public void testEnabledDefault() { } public void testParseSearchAggs() { - Aggregations emptyAggs = new Aggregations(Collections.emptyList()); + InternalAggregations emptyAggs = InternalAggregations.from(Collections.emptyList()); SearchResponse withEmptyAggs = mock(SearchResponse.class); when(withEmptyAggs.getAggregations()).thenReturn(emptyAggs); @@ -69,19 +69,19 @@ public void testParseSearchAggs() { ); int currentStat = 1; - List aggs = new ArrayList<>(PROVIDED_STATS.length); + List aggs = new ArrayList<>(PROVIDED_STATS.length); for (String statName : PROVIDED_STATS) { aggs.add(buildAgg(statName, currentStat++)); } - Aggregations aggregations = new Aggregations(aggs); + InternalAggregations aggregations = InternalAggregations.from(aggs); SearchResponse withAggs = mock(SearchResponse.class); when(withAggs.getAggregations()).thenReturn(aggregations); assertThat(TransformInfoTransportAction.parseSearchAggs(withAggs), equalTo(expectedStats)); } - private static Aggregation buildAgg(String name, double value) { - NumericMetricsAggregation.SingleValue agg = mock(NumericMetricsAggregation.SingleValue.class); + private static InternalAggregation buildAgg(String name, double value) { + InternalNumericMetricsAggregation.SingleValue agg = mock(InternalNumericMetricsAggregation.SingleValue.class); when(agg.getName()).thenReturn(name); when(agg.value()).thenReturn(value); return agg; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java index 2457e2719c0ee..6535afebdd2f9 100644 --- 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -71,9 +72,9 @@ public class DefaultCheckpointProviderTests extends ESTestCase { private Clock clock; private Client client; private ParentTaskAssigningClient parentTaskClient; - private Client remoteClient1; - private Client remoteClient2; - private Client remoteClient3; + private RemoteClusterClient remoteClient1; + private RemoteClusterClient remoteClient2; + private RemoteClusterClient remoteClient3; private IndexBasedTransformConfigManager transformConfigManager; private MockTransformAuditor transformAuditor; @@ -85,12 +86,9 @@ public void setUpMocks() { client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); parentTaskClient = new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")); - remoteClient1 = mock(Client.class); - when(remoteClient1.threadPool()).thenReturn(threadPool); - remoteClient2 = mock(Client.class); - when(remoteClient2.threadPool()).thenReturn(threadPool); - remoteClient3 = mock(Client.class); - when(remoteClient3.threadPool()).thenReturn(threadPool); + remoteClient1 = mock(RemoteClusterClient.class); + remoteClient2 = mock(RemoteClusterClient.class); + remoteClient3 = mock(RemoteClusterClient.class); when(client.getRemoteClusterClient(eq("remote-1"), any())).thenReturn(remoteClient1); when(client.getRemoteClusterClient(eq("remote-2"), 
any())).thenReturn(remoteClient2); when(client.getRemoteClusterClient(eq("remote-3"), any())).thenReturn(remoteClient3); @@ -312,7 +310,7 @@ public void testCreateNextCheckpointWithRemoteClient() throws InterruptedExcepti GetCheckpointAction.Response remoteCheckpointResponse = new GetCheckpointAction.Response( Map.of("index-1", new long[] { 4L, 5L, 6L, 7L, 8L }) ); - doAnswer(withResponse(remoteCheckpointResponse)).when(remoteClient1).execute(eq(GetCheckpointAction.INSTANCE), any(), any()); + doAnswer(withResponse(remoteCheckpointResponse)).when(remoteClient1).execute(eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); RemoteClusterResolver remoteClusterResolver = mock(RemoteClusterResolver.class); @@ -352,15 +350,15 @@ public void testCreateNextCheckpointWithRemoteClients() throws InterruptedExcept GetCheckpointAction.Response remoteCheckpointResponse1 = new GetCheckpointAction.Response( Map.of("index-1", new long[] { 1L, 2L, 3L }) ); - doAnswer(withResponse(remoteCheckpointResponse1)).when(remoteClient1).execute(eq(GetCheckpointAction.INSTANCE), any(), any()); + doAnswer(withResponse(remoteCheckpointResponse1)).when(remoteClient1).execute(eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); GetCheckpointAction.Response remoteCheckpointResponse2 = new GetCheckpointAction.Response( Map.of("index-1", new long[] { 4L, 5L, 6L, 7L, 8L }) ); - doAnswer(withResponse(remoteCheckpointResponse2)).when(remoteClient2).execute(eq(GetCheckpointAction.INSTANCE), any(), any()); + doAnswer(withResponse(remoteCheckpointResponse2)).when(remoteClient2).execute(eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); GetCheckpointAction.Response remoteCheckpointResponse3 = new GetCheckpointAction.Response(Map.of("index-1", new long[] { 9L })); - doAnswer(withResponse(remoteCheckpointResponse3)).when(remoteClient3).execute(eq(GetCheckpointAction.INSTANCE), any(), any()); + doAnswer(withResponse(remoteCheckpointResponse3)).when(remoteClient3).execute(eq(GetCheckpointAction.REMOTE_TYPE), 
any(), any()); RemoteClusterResolver remoteClusterResolver = mock(RemoteClusterResolver.class); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java index d76b6b67368f9..69139bc3f7561 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java @@ -29,7 +29,7 @@ public void testEquals() { } public void testFromSearchHit() { - SearchHit searchHit = new SearchHit(1); + SearchHit searchHit = SearchHit.unpooled(1); long seqNo = randomLongBetween(-2, 10_000); long primaryTerm = randomLongBetween(-2, 10_000); String index = randomAlphaOfLength(10); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index 8ee7e902285c9..fa8e867d77a49 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -544,7 +544,11 @@ protected void ActionListener.respondAndRelease( listener, (Response) new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(1) }, + new TotalHits(1L, TotalHits.Relation.EQUAL_TO), + 1.0f + ), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index 5dee74cccee7a..a18c926e21da6 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -513,7 +513,7 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti ); final SearchResponse searchResponse = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), @@ -606,7 +606,7 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce ); final SearchResponse searchResponse = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), @@ -702,7 +702,7 @@ public void testFailureCounterIsResetOnSuccess() throws Exception { ); final SearchResponse searchResponse = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index 9e72a92da5bee..e65f6a0e34694 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; +import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -248,7 +249,7 @@ void persistState(TransformState state, ActionListener listener) { } @Override - void validate(ActionListener listener) { + void validate(ActionListener listener) { listener.onResponse(null); } } @@ -335,7 +336,7 @@ void persistState(TransformState state, ActionListener listener) { } @Override - void validate(ActionListener listener) { + void validate(ActionListener listener) { listener.onResponse(null); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index 372aef3d0eea7..ee86f2ca6fcf4 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -34,6 +34,7 @@ import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; +import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; import org.elasticsearch.xpack.core.transform.transforms.TimeRetentionPolicyConfigTests; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -268,7 +269,7 @@ void persistState(TransformState state, ActionListener listener) { } @Override - void validate(ActionListener listener) { + void validate(ActionListener listener) { listener.onResponse(null); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java index 945161e548b75..8d0bd4f9d8019 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java @@ -20,53 +20,19 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; -import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.ParsedComposite; +import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.search.aggregations.bucket.range.InternalRange; import 
org.elasticsearch.search.aggregations.bucket.range.Range; -import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; -import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; -import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.GeoBounds; import org.elasticsearch.search.aggregations.metrics.GeoCentroid; import org.elasticsearch.search.aggregations.metrics.InternalMultiValueAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.ParsedAvg; -import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; -import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; -import org.elasticsearch.search.aggregations.metrics.ParsedMax; -import org.elasticsearch.search.aggregations.metrics.ParsedMin; -import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.ParsedStats; -import org.elasticsearch.search.aggregations.metrics.ParsedSum; -import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetric; import 
org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.aggregations.metrics.Percentiles; -import org.elasticsearch.search.aggregations.metrics.ScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; @@ -77,8 +43,6 @@ import org.elasticsearch.xpack.transform.transforms.pivot.AggregationResultUtils.BucketKeyExtractor; import java.io.IOException; -import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -86,8 +50,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static java.util.Arrays.asList; -import static java.util.Collections.emptyMap; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.is; @@ -97,42 +59,12 @@ public class AggregationResultUtilsTests extends ESTestCase { - private 
final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(namedXContents); - - private final String KEY = Aggregation.CommonFields.KEY.getPreferredName(); - private final String DOC_COUNT = Aggregation.CommonFields.DOC_COUNT.getPreferredName(); - - // aggregations potentially useful for writing tests, to be expanded as necessary - private static final List namedXContents; - static { - Map> map = new HashMap<>(); - map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c)); - map.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)); - map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)); - map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)); - map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)); - map.put(BucketScriptPipelineAggregationBuilder.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c)); - map.put(ScriptedMetricAggregationBuilder.NAME, (p, c) -> ParsedScriptedMetric.fromXContent(p, (String) c)); - map.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)); - map.put(StatsAggregationBuilder.NAME, (p, c) -> ParsedStats.fromXContent(p, (String) c)); - map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c)); - map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c)); - map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)); - map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)); - map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)); - - namedXContents = map.entrySet() - .stream() - .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) - .collect(Collectors.toList()); - } - class 
TestMultiValueAggregation extends InternalMultiValueAggregation { private final Map values; TestMultiValueAggregation(String name, Map values) { - super(name, emptyMap()); + super(name, Map.of()); this.values = values; } @@ -143,7 +75,7 @@ public String getWriteableName() { @Override public List getValuesAsStrings(String name) { - return Collections.singletonList(values.get(name).toString()); + return List.of(values.get(name).toString()); } @Override @@ -182,7 +114,7 @@ class TestNumericMultiValueAggregation extends InternalNumericMetricsAggregation private final Map values; TestNumericMultiValueAggregation(String name, Map values) { - super(name, null, emptyMap()); + super(name, null, Map.of()); this.values = values; } @@ -217,11 +149,6 @@ public Iterable valueNames() { } } - @Override - protected NamedXContentRegistry xContentRegistry() { - return namedXContentRegistry; - } - public void testExtractCompositeAggregationResults() throws IOException { String targetField = randomAlphaOfLengthBetween(5, 10); @@ -230,25 +157,35 @@ public void testExtractCompositeAggregationResults() throws IOException { """, targetField)); String aggName = randomAlphaOfLengthBetween(5, 10); - String aggTypedName = "avg#" + aggName; - Collection aggregationBuilders = Collections.singletonList(AggregationBuilders.avg(aggName)); - - Map input = asMap( - "buckets", - asList( - asMap(KEY, asMap(targetField, "ID1"), aggTypedName, asMap("value", 42.33), DOC_COUNT, 8), - asMap(KEY, asMap(targetField, "ID2"), aggTypedName, asMap("value", 28.99), DOC_COUNT, 3), - asMap(KEY, asMap(targetField, "ID3"), aggTypedName, asMap("value", Double.NaN), DOC_COUNT, 0) + List aggregationBuilders = List.of(AggregationBuilders.avg(aggName)); + + InternalComposite input = createComposite( + List.of( + createInternalCompositeBucket( + asMap(targetField, "ID1"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 42.33))), + 8L + ), + createInternalCompositeBucket( + asMap(targetField, "ID2"), + 
InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 28.99))), + 3L + ), + createInternalCompositeBucket( + asMap(targetField, "ID3"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, Double.NaN))), + 0L + ) ) ); - List> expected = asList( + List> expected = List.of( asMap(targetField, "ID1", aggName, 42.33), asMap(targetField, "ID2", aggName, 28.99), asMap(targetField, "ID3", aggName, null) ); Map fieldTypeMap = asStringMap(targetField, "keyword", aggName, "double"); - executeTest(groupBy, aggregationBuilders, Collections.emptyList(), input, fieldTypeMap, expected, 11); + executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 11); } public void testExtractCompositeAggregationResultsMultipleGroups() throws IOException { @@ -271,26 +208,41 @@ public void testExtractCompositeAggregationResultsMultipleGroups() throws IOExce String aggName = randomAlphaOfLengthBetween(5, 10); String aggTypedName = "avg#" + aggName; - Collection aggregationBuilders = Collections.singletonList(AggregationBuilders.avg(aggName)); - - Map input = asMap( - "buckets", - asList( - asMap(KEY, asMap(targetField, "ID1", targetField2, "ID1_2"), aggTypedName, asMap("value", 42.33), DOC_COUNT, 1), - asMap(KEY, asMap(targetField, "ID1", targetField2, "ID2_2"), aggTypedName, asMap("value", 8.4), DOC_COUNT, 2), - asMap(KEY, asMap(targetField, "ID2", targetField2, "ID1_2"), aggTypedName, asMap("value", 28.99), DOC_COUNT, 3), - asMap(KEY, asMap(targetField, "ID3", targetField2, "ID2_2"), aggTypedName, asMap("value", Double.NaN), DOC_COUNT, 0) + List aggregationBuilders = List.of(AggregationBuilders.avg(aggName)); + + InternalComposite input = createComposite( + List.of( + createInternalCompositeBucket( + asMap(targetField, "ID1", targetField2, "ID1_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 42.33))), + 1L + ), + createInternalCompositeBucket( + asMap(targetField, "ID1", targetField2, "ID2_2"), + 
InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 8.4))), + 2L + ), + createInternalCompositeBucket( + asMap(targetField, "ID2", targetField2, "ID1_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 28.99))), + 3L + ), + createInternalCompositeBucket( + asMap(targetField, "ID3", targetField2, "ID2_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, Double.NaN))), + 0L + ) ) ); - List> expected = asList( + List> expected = List.of( asMap(targetField, "ID1", targetField2, "ID1_2", aggName, 42.33), asMap(targetField, "ID1", targetField2, "ID2_2", aggName, 8.4), asMap(targetField, "ID2", targetField2, "ID1_2", aggName, 28.99), asMap(targetField, "ID3", targetField2, "ID2_2", aggName, null) ); Map fieldTypeMap = asStringMap(aggName, "double", targetField, "keyword", targetField2, "keyword"); - executeTest(groupBy, aggregationBuilders, Collections.emptyList(), input, fieldTypeMap, expected, 6); + executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 6); } public void testExtractCompositeAggregationResultsMultiAggregations() throws IOException { @@ -306,56 +258,38 @@ public void testExtractCompositeAggregationResultsMultiAggregations() throws IOE }""", targetField)); String aggName = randomAlphaOfLengthBetween(5, 10); - String aggTypedName = "avg#" + aggName; String aggName2 = randomAlphaOfLengthBetween(5, 10) + "_2"; - String aggTypedName2 = "max#" + aggName2; - Collection aggregationBuilders = asList(AggregationBuilders.avg(aggName), AggregationBuilders.max(aggName2)); + List aggregationBuilders = List.of(AggregationBuilders.avg(aggName), AggregationBuilders.max(aggName2)); - Map input = asMap( - "buckets", - asList( - asMap( - KEY, + InternalComposite input = createComposite( + List.of( + createInternalCompositeBucket( asMap(targetField, "ID1"), - aggTypedName, - asMap("value", 42.33), - aggTypedName2, - asMap("value", 9.9), - DOC_COUNT, - 111 + 
InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 42.33), createSingleMetricAgg(aggName2, 9.9))), + 111L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID2"), - aggTypedName, - asMap("value", 28.99), - aggTypedName2, - asMap("value", 222.33), - DOC_COUNT, - 88 + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 28.99), createSingleMetricAgg(aggName2, 222.33))), + 88L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID3"), - aggTypedName, - asMap("value", 12.55), - aggTypedName2, - asMap("value", Double.NaN), - DOC_COUNT, - 1 + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 12.55), createSingleMetricAgg(aggName2, Double.NaN))), + 1L ) ) ); - List> expected = asList( + List> expected = List.of( asMap(targetField, "ID1", aggName, 42.33, aggName2, 9.9), asMap(targetField, "ID2", aggName, 28.99, aggName2, 222.33), asMap(targetField, "ID3", aggName, 12.55, aggName2, null) ); Map fieldTypeMap = asStringMap(targetField, "keyword", aggName, "double", aggName2, "double"); - executeTest(groupBy, aggregationBuilders, Collections.emptyList(), input, fieldTypeMap, expected, 200); + executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 200); } public void testExtractCompositeAggregationResultsMultiAggregationsAndTypes() throws IOException { @@ -377,60 +311,43 @@ public void testExtractCompositeAggregationResultsMultiAggregationsAndTypes() th }""", targetField, targetField2)); String aggName = randomAlphaOfLengthBetween(5, 10); - String aggTypedName = "avg#" + aggName; String aggName2 = randomAlphaOfLengthBetween(5, 10) + "_2"; - String aggTypedName2 = "max#" + aggName2; - Collection aggregationBuilders = asList(AggregationBuilders.avg(aggName), AggregationBuilders.max(aggName2)); + List aggregationBuilders = List.of(AggregationBuilders.avg(aggName), AggregationBuilders.max(aggName2)); - Map input = asMap( - "buckets", - asList( - asMap( - KEY, + 
InternalComposite input = createComposite( + List.of( + createInternalCompositeBucket( asMap(targetField, "ID1", targetField2, "ID1_2"), - aggTypedName, - asMap("value", 42.33), - aggTypedName2, - asMap("value", 9.9, "value_as_string", "9.9F"), - DOC_COUNT, - 1 + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 42.33), createSingleMetricAgg(aggName2, 9.9, "9.9F"))), + 1L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID1", targetField2, "ID2_2"), - aggTypedName, - asMap("value", 8.4), - aggTypedName2, - asMap("value", 222.33, "value_as_string", "222.33F"), - DOC_COUNT, - 2 + InternalAggregations.from( + List.of(createSingleMetricAgg(aggName, 8.4), createSingleMetricAgg(aggName2, 222.33, "222.33F")) + ), + 2L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID2", targetField2, "ID1_2"), - aggTypedName, - asMap("value", 28.99), - aggTypedName2, - asMap("value", -2.44, "value_as_string", "-2.44F"), - DOC_COUNT, - 3 + InternalAggregations.from( + List.of(createSingleMetricAgg(aggName, 28.99), createSingleMetricAgg(aggName2, -2.44, "-2.44F")) + ), + 3L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID3", targetField2, "ID2_2"), - aggTypedName, - asMap("value", 12.55), - aggTypedName2, - asMap("value", Double.NaN, "value_as_string", "NaN"), - DOC_COUNT, - 4 + InternalAggregations.from( + List.of(createSingleMetricAgg(aggName, 12.55), createSingleMetricAgg(aggName2, Double.NaN, "NaN")) + ), + 4L ) ) ); - List> expected = asList( + List> expected = List.of( asMap(targetField, "ID1", targetField2, "ID1_2", aggName, 42.33, aggName2, "9.9F"), asMap(targetField, "ID1", targetField2, "ID2_2", aggName, 8.4, aggName2, "222.33F"), asMap(targetField, "ID2", targetField2, "ID1_2", aggName, 28.99, aggName2, "-2.44F"), @@ -446,7 +363,7 @@ public void testExtractCompositeAggregationResultsMultiAggregationsAndTypes() th targetField2, "keyword" ); - executeTest(groupBy, aggregationBuilders, 
Collections.emptyList(), input, fieldTypeMap, expected, 10); + executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 10); } public void testExtractCompositeAggregationResultsWithDynamicType() throws IOException { @@ -468,49 +385,42 @@ public void testExtractCompositeAggregationResultsWithDynamicType() throws IOExc }""", targetField, targetField2)); String aggName = randomAlphaOfLengthBetween(5, 10); - String aggTypedName = "scripted_metric#" + aggName; - Collection aggregationBuilders = asList(AggregationBuilders.scriptedMetric(aggName)); + List aggregationBuilders = List.of(AggregationBuilders.scriptedMetric(aggName)); - Map input = asMap( - "buckets", - asList( - asMap( - KEY, + InternalComposite input = createComposite( + List.of( + createInternalCompositeBucket( asMap(targetField, "ID1", targetField2, "ID1_2"), - aggTypedName, - asMap("value", asMap("field", 123.0)), - DOC_COUNT, - 1 + InternalAggregations.from(List.of(createScriptedMetric(aggName, asMap("field", 123.0)))), + 1L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID1", targetField2, "ID2_2"), - aggTypedName, - asMap("value", asMap("field", 1.0)), - DOC_COUNT, - 2 + InternalAggregations.from(List.of(createScriptedMetric(aggName, asMap("field", 1.0)))), + 2L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID2", targetField2, "ID1_2"), - aggTypedName, - asMap("value", asMap("field", 2.13)), - DOC_COUNT, - 3 + InternalAggregations.from(List.of(createScriptedMetric(aggName, asMap("field", 2.13)))), + 3L ), - asMap(KEY, asMap(targetField, "ID3", targetField2, "ID2_2"), aggTypedName, asMap("value", null), DOC_COUNT, 0) + createInternalCompositeBucket( + asMap(targetField, "ID3", targetField2, "ID2_2"), + InternalAggregations.from(List.of(createScriptedMetric(aggName, null))), + 0L + ) ) ); - List> expected = asList( + List> expected = List.of( asMap(targetField, "ID1", targetField2, "ID1_2", aggName, asMap("field", 123.0)), 
asMap(targetField, "ID1", targetField2, "ID2_2", aggName, asMap("field", 1.0)), asMap(targetField, "ID2", targetField2, "ID1_2", aggName, asMap("field", 2.13)), asMap(targetField, "ID3", targetField2, "ID2_2", aggName, null) ); Map fieldTypeMap = asStringMap(targetField, "keyword", targetField2, "keyword"); - executeTest(groupBy, aggregationBuilders, Collections.emptyList(), input, fieldTypeMap, expected, 6); + executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 6); } public void testExtractCompositeAggregationResultsWithPipelineAggregation() throws IOException { @@ -532,66 +442,47 @@ public void testExtractCompositeAggregationResultsWithPipelineAggregation() thro }""", targetField, targetField2)); String aggName = randomAlphaOfLengthBetween(5, 10); - String aggTypedName = "avg#" + aggName; String pipelineAggName = randomAlphaOfLengthBetween(5, 10) + "_2"; - String pipelineAggTypedName = "bucket_script#" + pipelineAggName; - - Collection aggregationBuilders = asList(AggregationBuilders.scriptedMetric(aggName)); - Collection pipelineAggregationBuilders = asList( - PipelineAggregatorBuilders.bucketScript( - pipelineAggName, - Collections.singletonMap("param_1", aggName), - new Script("return params.param_1") - ) + + List aggregationBuilders = List.of(AggregationBuilders.scriptedMetric(aggName)); + List pipelineAggregationBuilders = List.of( + PipelineAggregatorBuilders.bucketScript(pipelineAggName, Map.of("param_1", aggName), new Script("return params.param_1")) ); - Map input = asMap( - "buckets", - asList( - asMap( - KEY, + InternalComposite input = createComposite( + List.of( + createInternalCompositeBucket( asMap(targetField, "ID1", targetField2, "ID1_2"), - aggTypedName, - asMap("value", 123.0), - pipelineAggTypedName, - asMap("value", 123.0), - DOC_COUNT, - 1 + InternalAggregations.from( + List.of(createSingleMetricAgg(aggName, 123.0), createSingleMetricAgg(pipelineAggName, 123.0, "123.0")) + ), + 1L ), - asMap( - KEY, + 
createInternalCompositeBucket( asMap(targetField, "ID1", targetField2, "ID2_2"), - aggTypedName, - asMap("value", 1.0), - pipelineAggTypedName, - asMap("value", 1.0), - DOC_COUNT, - 2 + InternalAggregations.from( + List.of(createSingleMetricAgg(aggName, 1.0), createSingleMetricAgg(pipelineAggName, 1.0, "1.0")) + ), + 2L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID2", targetField2, "ID1_2"), - aggTypedName, - asMap("value", 2.13), - pipelineAggTypedName, - asMap("value", 2.13), - DOC_COUNT, - 3 + InternalAggregations.from( + List.of(createSingleMetricAgg(aggName, 2.13), createSingleMetricAgg(pipelineAggName, 2.13, "2.13")) + ), + 3L ), - asMap( - KEY, + createInternalCompositeBucket( asMap(targetField, "ID3", targetField2, "ID2_2"), - aggTypedName, - asMap("value", 12.0), - pipelineAggTypedName, - asMap("value", Double.NaN), - DOC_COUNT, - 4 + InternalAggregations.from( + List.of(createSingleMetricAgg(aggName, 12.0), createSingleMetricAgg(pipelineAggName, Double.NaN, "NaN")) + ), + 4L ) ) ); - List> expected = asList( + List> expected = List.of( asMap(targetField, "ID1", targetField2, "ID1_2", aggName, 123.0, pipelineAggName, 123.0), asMap(targetField, "ID1", targetField2, "ID2_2", aggName, 1.0, pipelineAggName, 1.0), asMap(targetField, "ID2", targetField2, "ID1_2", aggName, 2.13, pipelineAggName, 2.13), @@ -620,28 +511,58 @@ public void testExtractCompositeAggregationResultsDocIDs() throws IOException { }""", targetField, targetField2)); String aggName = randomAlphaOfLengthBetween(5, 10); - String aggTypedName = "avg#" + aggName; - Collection aggregationBuilders = Collections.singletonList(AggregationBuilders.avg(aggName)); - - Map inputFirstRun = asMap( - "buckets", - asList( - asMap(KEY, asMap(targetField, "ID1", targetField2, "ID1_2"), aggTypedName, asMap("value", 42.33), DOC_COUNT, 1), - asMap(KEY, asMap(targetField, "ID1", targetField2, "ID2_2"), aggTypedName, asMap("value", 8.4), DOC_COUNT, 2), - asMap(KEY, asMap(targetField, 
"ID2", targetField2, "ID1_2"), aggTypedName, asMap("value", 28.99), DOC_COUNT, 3), - asMap(KEY, asMap(targetField, "ID3", targetField2, "ID2_2"), aggTypedName, asMap("value", 12.55), DOC_COUNT, 4) + List aggregationBuilders = List.of(AggregationBuilders.avg(aggName)); + + InternalComposite inputFirstRun = createComposite( + List.of( + createInternalCompositeBucket( + asMap(targetField, "ID1", targetField2, "ID1_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 42.33))), + 1L + ), + createInternalCompositeBucket( + asMap(targetField, "ID1", targetField2, "ID2_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 8.4))), + 2L + ), + createInternalCompositeBucket( + asMap(targetField, "ID2", targetField2, "ID1_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 28.99))), + 3L + ), + createInternalCompositeBucket( + asMap(targetField, "ID3", targetField2, "ID2_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 12.55))), + 4L + ) ) ); - Map inputSecondRun = asMap( - "buckets", - asList( - asMap(KEY, asMap(targetField, "ID1", targetField2, "ID1_2"), aggTypedName, asMap("value", 433.33), DOC_COUNT, 12), - asMap(KEY, asMap(targetField, "ID1", targetField2, "ID2_2"), aggTypedName, asMap("value", 83.4), DOC_COUNT, 32), - asMap(KEY, asMap(targetField, "ID2", targetField2, "ID1_2"), aggTypedName, asMap("value", 21.99), DOC_COUNT, 2), - asMap(KEY, asMap(targetField, "ID3", targetField2, "ID2_2"), aggTypedName, asMap("value", 122.55), DOC_COUNT, 44) + InternalComposite inputSecondRun = createComposite( + List.of( + createInternalCompositeBucket( + asMap(targetField, "ID1", targetField2, "ID1_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 433.33))), + 12L + ), + createInternalCompositeBucket( + asMap(targetField, "ID1", targetField2, "ID2_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 83.4))), + 32L + ), + createInternalCompositeBucket( + 
asMap(targetField, "ID2", targetField2, "ID1_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 21.99))), + 2L + ), + createInternalCompositeBucket( + asMap(targetField, "ID3", targetField2, "ID2_2"), + InternalAggregations.from(List.of(createSingleMetricAgg(aggName, 122.55))), + 44L + ) ) ); + TransformIndexerStats stats = new TransformIndexerStats(); TransformProgress progress = new TransformProgress(); @@ -650,7 +571,7 @@ public void testExtractCompositeAggregationResultsDocIDs() throws IOException { List> resultFirstRun = runExtraction( groupBy, aggregationBuilders, - Collections.emptyList(), + List.of(), inputFirstRun, fieldTypeMap, stats, @@ -659,7 +580,7 @@ public void testExtractCompositeAggregationResultsDocIDs() throws IOException { List> resultSecondRun = runExtraction( groupBy, aggregationBuilders, - Collections.emptyList(), + List.of(), inputSecondRun, fieldTypeMap, stats, @@ -724,53 +645,52 @@ public void testUpdateDocumentWithObjectAndNotObject() { assertThat(exception.getMessage(), equalTo("mixed object types of nested and non-nested fields [foo.bar]")); } - public static NumericMetricsAggregation.SingleValue createSingleMetricAgg(String name, Double value, String valueAsString) { - NumericMetricsAggregation.SingleValue agg = mock(NumericMetricsAggregation.SingleValue.class); + public static InternalNumericMetricsAggregation.SingleValue createSingleMetricAgg(String name, Double value) { + InternalNumericMetricsAggregation.SingleValue agg = mock(InternalNumericMetricsAggregation.SingleValue.class); when(agg.value()).thenReturn(value); - when(agg.getValueAsString()).thenReturn(valueAsString); when(agg.getName()).thenReturn(name); return agg; } + public static InternalNumericMetricsAggregation.SingleValue createSingleMetricAgg(String name, Double value, String valueAsString) { + InternalNumericMetricsAggregation.SingleValue agg = createSingleMetricAgg(name, value); + when(agg.getValueAsString()).thenReturn(valueAsString); + return 
agg; + } + public void testSingleValueAggExtractor() { Aggregation agg = createSingleMetricAgg("metric", Double.NaN, "NaN"); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("metric", "double"), ""), is(nullValue())); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of("metric", "double"), ""), is(nullValue())); agg = createSingleMetricAgg("metric", Double.POSITIVE_INFINITY, "NaN"); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("metric", "double"), ""), is(nullValue())); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of("metric", "double"), ""), is(nullValue())); agg = createSingleMetricAgg("metric", 100.0, "100.0"); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("metric", "double"), ""), equalTo(100.0)); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of("metric", "double"), ""), equalTo(100.0)); agg = createSingleMetricAgg("metric", 100.0, "one_hundred"); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("metric", "double"), ""), equalTo(100.0)); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of("metric", "double"), ""), equalTo(100.0)); agg = createSingleMetricAgg("metric", 100.0, "one_hundred"); - assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("metric", "string"), ""), - equalTo("one_hundred") - ); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of("metric", "string"), ""), equalTo("one_hundred")); agg = createSingleMetricAgg("metric", 100.0, "one_hundred"); - assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("metric", "unsigned_long"), ""), - equalTo(100L) - ); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of("metric", "unsigned_long"), ""), equalTo(100L)); } public void 
testMultiValueAggExtractor() { - Aggregation agg = new TestMultiValueAggregation("mv_metric", Collections.singletonMap("ip", "192.168.1.1")); + Aggregation agg = new TestMultiValueAggregation("mv_metric", Map.of("ip", "192.168.1.1")); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("mv_metric.ip", "ip"), ""), - equalTo(Collections.singletonMap("ip", "192.168.1.1")) + AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.ip", "ip"), ""), + equalTo(Map.of("ip", "192.168.1.1")) ); - agg = new TestMultiValueAggregation("mv_metric", Collections.singletonMap("top_answer", "fortytwo")); + agg = new TestMultiValueAggregation("mv_metric", Map.of("top_answer", "fortytwo")); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("mv_metric.written_answer", "written_answer"), ""), - equalTo(Collections.singletonMap("top_answer", "fortytwo")) + AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.written_answer", "written_answer"), ""), + equalTo(Map.of("top_answer", "fortytwo")) ); agg = new TestMultiValueAggregation("mv_metric", Map.of("ip", "192.168.1.1", "top_answer", "fortytwo")); @@ -782,21 +702,18 @@ public void testMultiValueAggExtractor() { } public void testNumericMultiValueAggExtractor() { - Aggregation agg = new TestNumericMultiValueAggregation( - "mv_metric", - Collections.singletonMap("approx_answer", Double.valueOf(42.2)) - ); + Aggregation agg = new TestNumericMultiValueAggregation("mv_metric", Map.of("approx_answer", 42.2)); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("mv_metric.approx_answer", "double"), ""), - equalTo(Collections.singletonMap("approx_answer", Double.valueOf(42.2))) + AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.approx_answer", "double"), ""), + equalTo(Map.of("approx_answer", Double.valueOf(42.2))) ); - agg = new TestNumericMultiValueAggregation("mv_metric", 
Collections.singletonMap("exact_answer", Double.valueOf(42.0))); + agg = new TestNumericMultiValueAggregation("mv_metric", Map.of("exact_answer", 42.0)); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.singletonMap("mv_metric.exact_answer", "long"), ""), - equalTo(Collections.singletonMap("exact_answer", Long.valueOf(42))) + AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.exact_answer", "long"), ""), + equalTo(Map.of("exact_answer", 42L)) ); agg = new TestNumericMultiValueAggregation( @@ -813,27 +730,28 @@ public void testNumericMultiValueAggExtractor() { assertThat( AggregationResultUtils.getExtractor(agg) .value(agg, Map.of("filter.mv_metric.approx_answer", "double", "filter.mv_metric.exact_answer", "long"), "filter"), - equalTo(Map.of("approx_answer", Double.valueOf(42.2), "exact_answer", Long.valueOf(42))) + equalTo(Map.of("approx_answer", 42.2, "exact_answer", Long.valueOf(42))) ); } - private ScriptedMetric createScriptedMetric(Object returnValue) { - ScriptedMetric agg = mock(ScriptedMetric.class); + private InternalScriptedMetric createScriptedMetric(String name, Object returnValue) { + InternalScriptedMetric agg = mock(InternalScriptedMetric.class); + when(agg.getName()).thenReturn(name); when(agg.aggregation()).thenReturn(returnValue); return agg; } @SuppressWarnings("unchecked") public void testScriptedMetricAggExtractor() { - Aggregation agg = createScriptedMetric(null); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), is(nullValue())); + Aggregation agg = createScriptedMetric("name", null); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), is(nullValue())); - agg = createScriptedMetric(Collections.singletonList("values")); - Object val = AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""); + agg = createScriptedMetric("name", List.of("values")); + Object val = 
AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""); assertThat((List) val, hasItem("values")); - agg = createScriptedMetric(Collections.singletonMap("key", 100)); - val = AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""); + agg = createScriptedMetric("name", Map.of("key", 100)); + val = AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""); assertThat(((Map) val).get("key"), equalTo(100)); } @@ -846,13 +764,13 @@ private GeoCentroid createGeoCentroid(GeoPoint point, long count) { public void testGeoCentroidAggExtractor() { Aggregation agg = createGeoCentroid(null, 0); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), is(nullValue())); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), is(nullValue())); agg = createGeoCentroid(new GeoPoint(100.0, 101.0), 0); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), is(nullValue())); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), is(nullValue())); agg = createGeoCentroid(new GeoPoint(100.0, 101.0), randomIntBetween(1, 100)); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), equalTo("100.0, 101.0")); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo("100.0, 101.0")); } private GeoBounds createGeoBounds(GeoPoint tl, GeoPoint br) { @@ -866,10 +784,10 @@ private GeoBounds createGeoBounds(GeoPoint tl, GeoPoint br) { public void testGeoBoundsAggExtractor() { final int numberOfRuns = 25; Aggregation agg = createGeoBounds(null, new GeoPoint(100.0, 101.0)); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), is(nullValue())); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), is(nullValue())); agg = createGeoBounds(new GeoPoint(100.0, 101.0), null); - 
assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), is(nullValue())); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), is(nullValue())); String type = "point"; for (int i = 0; i < numberOfRuns; i++) { @@ -877,9 +795,9 @@ public void testGeoBoundsAggExtractor() { expectedObject.put("type", type); double lat = randomDoubleBetween(-90.0, 90.0, false); double lon = randomDoubleBetween(-180.0, 180.0, false); - expectedObject.put("coordinates", asList(lon, lat)); + expectedObject.put("coordinates", List.of(lon, lat)); agg = createGeoBounds(new GeoPoint(lat, lon), new GeoPoint(lat, lon)); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), equalTo(expectedObject)); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo(expectedObject)); } type = "linestring"; @@ -894,7 +812,7 @@ public void testGeoBoundsAggExtractor() { lon2 = randomDoubleBetween(-180.0, 180.0, false); } agg = createGeoBounds(new GeoPoint(lat, lon), new GeoPoint(lat2, lon2)); - Object val = AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""); + Object val = AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""); Map geoJson = (Map) val; assertThat(geoJson.get("type"), equalTo(type)); List coordinates = (List) geoJson.get("coordinates"); @@ -918,18 +836,18 @@ public void testGeoBoundsAggExtractor() { lon2 = randomDoubleBetween(-180.0, 180.0, false); } agg = createGeoBounds(new GeoPoint(lat, lon), new GeoPoint(lat2, lon2)); - Object val = AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""); + Object val = AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""); Map geoJson = (Map) val; assertThat(geoJson.get("type"), equalTo(type)); List> coordinates = (List>) geoJson.get("coordinates"); assertThat(coordinates.size(), equalTo(1)); assertThat(coordinates.get(0).size(), equalTo(5)); - List> 
expected = asList( - asList(lon, lat), - asList(lon2, lat), - asList(lon2, lat2), - asList(lon, lat2), - asList(lon, lat) + List> expected = List.of( + List.of(lon, lat), + List.of(lon2, lat), + List.of(lon2, lat2), + List.of(lon, lat2), + List.of(lon, lat) ); for (int j = 0; j < 5; j++) { Double[] coordinate = coordinates.get(0).get(j); @@ -940,6 +858,28 @@ public void testGeoBoundsAggExtractor() { } } + private static InternalComposite createComposite(List buckets) { + InternalComposite composite = mock(InternalComposite.class); + + when(composite.getBuckets()).thenReturn(buckets); + when(composite.getName()).thenReturn("my_feature"); + Map afterKey = buckets.get(buckets.size() - 1).getKey(); + when(composite.afterKey()).thenReturn(afterKey); + return composite; + } + + private static InternalComposite.InternalBucket createInternalCompositeBucket( + Map key, + InternalAggregations aggregations, + long docCount + ) { + InternalComposite.InternalBucket bucket = mock(InternalComposite.InternalBucket.class); + when(bucket.getDocCount()).thenReturn(docCount); + when(bucket.getAggregations()).thenReturn(aggregations); + when(bucket.getKey()).thenReturn(key); + return bucket; + } + public static Percentiles createPercentilesAgg(String name, List percentiles) { Percentiles agg = mock(Percentiles.class); @@ -951,17 +891,17 @@ public static Percentiles createPercentilesAgg(String name, List per public void testPercentilesAggExtractor() { Aggregation agg = createPercentilesAgg( "p_agg", - asList(new Percentile(1, 0), new Percentile(50, 22.2), new Percentile(99, 43.3), new Percentile(99.5, 100.3)) + List.of(new Percentile(1, 0), new Percentile(50, 22.2), new Percentile(99, 43.3), new Percentile(99.5, 100.3)) ); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), + AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo(asMap("1", 0.0, "50", 22.2, "99", 43.3, "99_5", 100.3)) ); } public void 
testPercentilesAggExtractorNaN() { - Aggregation agg = createPercentilesAgg("p_agg", asList(new Percentile(1, Double.NaN), new Percentile(50, Double.NaN))); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), equalTo(asMap("1", null, "50", null))); + Aggregation agg = createPercentilesAgg("p_agg", List.of(new Percentile(1, Double.NaN), new Percentile(50, Double.NaN))); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo(asMap("1", null, "50", null))); } @SuppressWarnings("unchecked") @@ -975,7 +915,7 @@ public static Range createRangeAgg(String name, List bucke public void testRangeAggExtractor() { Aggregation agg = createRangeAgg( "p_agg", - asList( + List.of( new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, false, DocValueFormat.RAW), new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), @@ -987,7 +927,7 @@ public void testRangeAggExtractor() { ) ); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), + AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo( asMap( "*-10_5", @@ -1011,14 +951,16 @@ public void testRangeAggExtractor() { ); } - public static SingleBucketAggregation createSingleBucketAgg(String name, long docCount, Aggregation... subAggregations) { - SingleBucketAggregation agg = mock(SingleBucketAggregation.class); + public static InternalSingleBucketAggregation createSingleBucketAgg( + String name, + long docCount, + InternalAggregation... 
subAggregations + ) { + InternalSingleBucketAggregation agg = mock(InternalSingleBucketAggregation.class); when(agg.getDocCount()).thenReturn(docCount); when(agg.getName()).thenReturn(name); if (subAggregations != null) { - org.elasticsearch.search.aggregations.Aggregations subAggs = new org.elasticsearch.search.aggregations.Aggregations( - asList(subAggregations) - ); + InternalAggregations subAggs = InternalAggregations.from(List.of(subAggregations)); when(agg.getAggregations()).thenReturn(subAggs); } else { when(agg.getAggregations()).thenReturn(null); @@ -1028,13 +970,10 @@ public static SingleBucketAggregation createSingleBucketAgg(String name, long do public void testSingleBucketAggExtractor() { Aggregation agg = createSingleBucketAgg("sba", 42L); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), equalTo(42L)); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo(42L)); agg = createSingleBucketAgg("sba1", 42L, createSingleMetricAgg("sub1", 100.0, "100.0")); - assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, Collections.emptyMap(), ""), - equalTo(Collections.singletonMap("sub1", 100.0)) - ); + assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo(Map.of("sub1", 100.0))); agg = createSingleBucketAgg( "sba2", @@ -1097,17 +1036,15 @@ public void testDatesAsEpochBucketKeyExtractor() { private void executeTest( GroupConfig groups, - Collection aggregationBuilders, - Collection pipelineAggregationBuilders, - Map input, + List aggregationBuilders, + List pipelineAggregationBuilders, + InternalComposite input, Map fieldTypeMap, List> expected, long expectedDocCounts - ) throws IOException { + ) { TransformIndexerStats stats = new TransformIndexerStats(); TransformProgress progress = new TransformProgress(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.map(input); List> result = 
runExtraction( groups, @@ -1131,30 +1068,23 @@ private void executeTest( private List> runExtraction( GroupConfig groups, - Collection aggregationBuilders, - Collection pipelineAggregationBuilders, - Map input, + List aggregationBuilders, + List pipelineAggregationBuilders, + InternalComposite input, Map fieldTypeMap, TransformIndexerStats stats, TransformProgress progress - ) throws IOException { - - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.map(input); - - try (XContentParser parser = createParser(builder)) { - CompositeAggregation agg = ParsedComposite.fromXContent(parser, "my_feature"); - return AggregationResultUtils.extractCompositeAggregationResults( - agg, - groups, - aggregationBuilders, - pipelineAggregationBuilders, - fieldTypeMap, - stats, - progress, - true - ).collect(Collectors.toList()); - } + ) { + return AggregationResultUtils.extractCompositeAggregationResults( + input, + groups, + aggregationBuilders, + pipelineAggregationBuilders, + fieldTypeMap, + stats, + progress, + true + ).collect(Collectors.toList()); } private GroupConfig parseGroupConfig(String json) throws IOException { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java index 708cb3d93cbed..a774a202f333b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java @@ -11,9 +11,10 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; -import 
org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -26,7 +27,6 @@ import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSourceTests; import org.elasticsearch.xpack.transform.transforms.Function.ChangeCollector; import org.elasticsearch.xpack.transform.transforms.pivot.CompositeBucketsChangeCollector.FieldCollector; -import org.mockito.stubbing.Answer; import java.io.IOException; import java.util.ArrayList; @@ -91,28 +91,28 @@ public void testTermsFieldCollector() throws IOException { ChangeCollector collector = CompositeBucketsChangeCollector.buildChangeCollector(groups, null); - CompositeAggregation composite = mock(CompositeAggregation.class); + InternalComposite composite = mock(InternalComposite.class); when(composite.getName()).thenReturn("_transform_change_collector"); - when(composite.getBuckets()).thenAnswer((Answer>) invocationOnMock -> { - List compositeBuckets = new ArrayList<>(); - CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class); + when(composite.getBuckets()).thenAnswer(invocationOnMock -> { + List compositeBuckets = new ArrayList<>(); + InternalComposite.InternalBucket bucket = mock(InternalComposite.InternalBucket.class); when(bucket.getKey()).thenReturn(Collections.singletonMap("id", "id1")); compositeBuckets.add(bucket); - bucket = mock(CompositeAggregation.Bucket.class); + bucket = mock(InternalComposite.InternalBucket.class); 
when(bucket.getKey()).thenReturn(Collections.singletonMap("id", "id2")); compositeBuckets.add(bucket); - bucket = mock(CompositeAggregation.Bucket.class); + bucket = mock(InternalComposite.InternalBucket.class); when(bucket.getKey()).thenReturn(Collections.singletonMap("id", "id3")); compositeBuckets.add(bucket); return compositeBuckets; }); - Aggregations aggs = new Aggregations(Collections.singletonList(composite)); + InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(composite)); SearchResponse response = new SearchResponse( - null, + SearchHits.EMPTY_WITH_TOTAL_HITS, aggs, null, false, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java index dab6d8518d28f..a70bf930a7d5d 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java @@ -11,10 +11,11 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import 
org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation.SingleValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource; @@ -59,8 +60,8 @@ public class DateHistogramFieldCollectorTests extends ESTestCase { @Before public void setupDateHistogramFieldCollectorTest() { - minTimestamp = mock(NumericMetricsAggregation.SingleValue.class); - maxTimestamp = mock(NumericMetricsAggregation.SingleValue.class); + minTimestamp = mock(InternalNumericMetricsAggregation.SingleValue.class); + maxTimestamp = mock(InternalNumericMetricsAggregation.SingleValue.class); when(minTimestamp.getName()).thenReturn("_transform_change_collector.output_timestamp.min"); when(maxTimestamp.getName()).thenReturn("_transform_change_collector.output_timestamp.max"); @@ -171,8 +172,8 @@ private static QueryBuilder buildFilterQuery(ChangeCollector collector) { private static SearchResponse buildSearchResponse(SingleValue minTimestamp, SingleValue maxTimestamp) { return new SearchResponse( - null, - new Aggregations(Arrays.asList(minTimestamp, maxTimestamp)), + SearchHits.EMPTY_WITH_TOTAL_HITS, + InternalAggregations.from(Arrays.asList(minTimestamp, maxTimestamp)), null, false, null, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java index 67f923769ffe3..5d58ac9904482 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import 
org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; @@ -19,12 +20,13 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.TestThreadPool; @@ -39,6 +41,8 @@ import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfigTests; import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; +import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; +import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfigTests; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; @@ -239,32 +243,55 @@ public void testProcessSearchResponse() { SettingsConfigTests.randomSettingsConfig(), TransformConfigVersion.CURRENT, Collections.emptySet() - ); + ) { + @Override + public Tuple, Map> processSearchResponse( + SearchResponse searchResponse, + String destinationIndex, + String 
destinationPipeline, + Map fieldTypeMap, + TransformIndexerStats stats, + TransformProgress progress + ) { + try { + return super.processSearchResponse( + searchResponse, + destinationIndex, + destinationPipeline, + fieldTypeMap, + stats, + progress + ); + } finally { + searchResponse.decRef(); + } + } + }; - Aggregations aggs = null; + InternalAggregations aggs = null; assertThat(pivot.processSearchResponse(searchResponseFromAggs(aggs), null, null, null, null, null), is(nullValue())); - aggs = new Aggregations(List.of()); + aggs = InternalAggregations.from(List.of()); assertThat(pivot.processSearchResponse(searchResponseFromAggs(aggs), null, null, null, null, null), is(nullValue())); - CompositeAggregation compositeAgg = mock(CompositeAggregation.class); + InternalComposite compositeAgg = mock(InternalComposite.class); when(compositeAgg.getName()).thenReturn("_transform"); when(compositeAgg.getBuckets()).thenReturn(List.of()); when(compositeAgg.afterKey()).thenReturn(null); - aggs = new Aggregations(List.of(compositeAgg)); + aggs = InternalAggregations.from(List.of(compositeAgg)); assertThat(pivot.processSearchResponse(searchResponseFromAggs(aggs), null, null, null, null, null), is(nullValue())); when(compositeAgg.getBuckets()).thenReturn(List.of()); when(compositeAgg.afterKey()).thenReturn(Map.of("key", "value")); - aggs = new Aggregations(List.of(compositeAgg)); + aggs = InternalAggregations.from(List.of(compositeAgg)); // Empty bucket list is *not* a stop condition for composite agg processing. 
assertThat(pivot.processSearchResponse(searchResponseFromAggs(aggs), null, null, null, null, null), is(notNullValue())); - CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class); - List buckets = List.of(bucket); + InternalComposite.InternalBucket bucket = mock(InternalComposite.InternalBucket.class); + List buckets = List.of(bucket); doReturn(buckets).when(compositeAgg).getBuckets(); when(compositeAgg.afterKey()).thenReturn(null); - aggs = new Aggregations(List.of(compositeAgg)); + aggs = InternalAggregations.from(List.of(compositeAgg)); assertThat(pivot.processSearchResponse(searchResponseFromAggs(aggs), null, null, null, null, null), is(nullValue())); } @@ -323,8 +350,23 @@ public void testPreviewForCompositeAggregation() throws Exception { assertThat(responseHolder.get(), is(empty())); } - private static SearchResponse searchResponseFromAggs(Aggregations aggs) { - return new SearchResponse(null, aggs, null, false, null, null, 1, null, 10, 5, 0, 0, new ShardSearchFailure[0], null); + private static SearchResponse searchResponseFromAggs(InternalAggregations aggs) { + return new SearchResponse( + SearchHits.EMPTY_WITH_TOTAL_HITS, + aggs, + null, + false, + null, + null, + 1, + null, + 10, + 5, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null + ); } private class MyMockClient extends NoOpClient { @@ -392,7 +434,7 @@ protected void ActionListener listener ) { SearchResponse response = mock(SearchResponse.class); - when(response.getAggregations()).thenReturn(new Aggregations(List.of())); + when(response.getAggregations()).thenReturn(InternalAggregations.from(List.of())); listener.onResponse((Response) response); } } @@ -410,8 +452,8 @@ protected void ActionListener listener ) { SearchResponse response = mock(SearchResponse.class); - CompositeAggregation compositeAggregation = mock(CompositeAggregation.class); - when(response.getAggregations()).thenReturn(new Aggregations(List.of(compositeAggregation))); + InternalComposite compositeAggregation 
= mock(InternalComposite.class); + when(response.getAggregations()).thenReturn(InternalAggregations.from(List.of(compositeAggregation))); when(compositeAggregation.getBuckets()).thenReturn(new ArrayList<>()); listener.onResponse((Response) response); } diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java index f902c94e5aa9b..7f6e645b15015 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/VectorTilePlugin.java @@ -8,6 +8,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -33,6 +34,7 @@ protected XPackLicenseState getLicenseState() { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java index fe6a0b93ca7cd..ba5b97bbcb062 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java @@ -34,7 +34,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import 
org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoGrid; @@ -136,9 +136,9 @@ public RestResponse buildResponse(SearchResponse searchResponse) throws Exceptio final InternalGeoBounds bounds = searchResponse.getAggregations() != null ? searchResponse.getAggregations().get(BOUNDS_FIELD) : null; - final Aggregations aggsWithoutGridAndBounds = searchResponse.getAggregations() == null + final InternalAggregations aggsWithoutGridAndBounds = searchResponse.getAggregations() == null ? null - : new Aggregations( + : InternalAggregations.from( searchResponse.getAggregations() .asList() .stream() diff --git a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java index 7b8355ec41e90..8bd951cff40da 100644 --- a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java +++ b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java @@ -200,8 +200,8 @@ public void handleException(TransportException exp) { } @Override - public Executor executor(ThreadPool threadPool) { - return handler.executor(threadPool); + public Executor executor() { + return handler.executor(); } @Override diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index f02b3f865adf0..d97b0bd81a101 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -100,12 +100,12 @@ public void testExecuteWithAggs() { public void testExecuteAccessHits() throws Exception { CompareCondition condition = new CompareCondition("ctx.payload.hits.hits.0._score", CompareCondition.Op.EQ, 1, Clock.systemUTC()); - SearchHit hit = new SearchHit(0, "1"); + SearchHit hit = SearchHit.unpooled(0, "1"); hit.score(1f); hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); SearchResponse response = new SearchResponse( - new SearchHits(new SearchHit[] { hit }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f), + SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f), null, null, false, diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index b82622fbd4819..67835971cd15a 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -180,7 +180,7 @@ public void testActionConditionWithFailures() throws Exception { final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - 
searchHitReference.set(response.getHits().getAt(0)); + searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); } @@ -240,7 +240,7 @@ public void testActionCondition() throws Exception { final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - searchHitReference.set(response.getHits().getAt(0)); + searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index edee4fb515a81..5b7ea39079f28 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; @@ -106,7 +106,7 @@ public void testEmailFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = 
response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("from"); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 01400c3192289..97347de1ea23e 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; @@ -103,7 +103,7 @@ public void testHttpFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("input_result_path"); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 1f2810c4d82f3..7dde279fb90db 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java 
+++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher.history; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; @@ -55,7 +55,7 @@ public void testIndexActionFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("index_action_indices"); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 2c86df184dc22..567d4acfa45e5 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import 
org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; @@ -73,7 +73,7 @@ public void testHttpFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("input_search_type"); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index d1153b6eca3e6..265b252082c68 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher.test.integration; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.rest.RestStatus; @@ -65,7 +65,7 @@ public void testThatLoadingWithNonExistingIndexWorks() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); assertBusy(() -> { - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh(".watcher-history*").get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh(".watcher-history*").get(); assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); assertResponse( prepareSearch(".watcher-history*").setSize(0), diff 
--git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index fc1d200c91b82..f107bac568902 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -690,6 +691,7 @@ static int getWatcherThreadPoolSize(final boolean isDataNode, final int allocate @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index ea9295600fe41..a067b99c6bff0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -10,11 +10,11 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -404,7 +404,7 @@ private Collection loadWatches(ClusterState clusterState) { // Non private for unit testing purposes void refreshWatches(IndexMetadata indexMetadata) { - RefreshResponse refreshResponse = client.admin() + BroadcastResponse refreshResponse = client.admin() .indices() .refresh(new RefreshRequest(INDEX)) .actionGet(TimeValue.timeValueSeconds(5)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java index d3af489a77a2a..1d93e999a4407 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java @@ -44,7 +44,7 @@ public void testWatcherDisabledTests() throws Exception { List> executorBuilders = watcher.getExecutorBuilders(settings); assertThat(executorBuilders, hasSize(0)); assertThat(watcher.getActions(), hasSize(2)); - assertThat(watcher.getRestHandlers(settings, null, null, null, null, null, null), hasSize(0)); + assertThat(watcher.getRestHandlers(settings, null, null, null, null, null, null, null), hasSize(0)); // ensure index module is not called, even if watches index is tried IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(Watch.INDEX, settings); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index c2ed68d8fa1bd..19bac967c576a 100644 --- 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; @@ -21,6 +20,7 @@ import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -162,12 +162,12 @@ void stopExecutor() {} ClusterState clusterState = csBuilder.build(); // response setup, successful refresh response - RefreshResponse refreshResponse = mock(RefreshResponse.class); + BroadcastResponse refreshResponse = mock(BroadcastResponse.class); when(refreshResponse.getSuccessfulShards()).thenReturn( clusterState.getMetadata().getIndices().get(Watch.INDEX).getNumberOfShards() ); doAnswer(invocation -> { - ActionListener listener = (ActionListener) invocation.getArguments()[2]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onResponse(refreshResponse); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(RefreshRequest.class), anyActionListener()); @@ -196,7 +196,7 @@ void stopExecutor() {} SearchHit[] hits = new SearchHit[count]; for (int i = 0; i < count; i++) { String id = String.valueOf(i); - SearchHit hit = new SearchHit(1, id); + 
SearchHit hit = SearchHit.unpooled(1, id); hit.version(1L); hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever")); hits[i] = hit; @@ -212,7 +212,7 @@ void stopExecutor() {} when(watch.status()).thenReturn(watchStatus); when(parser.parse(eq(id), eq(true), any(), eq(XContentType.JSON), anyLong(), anyLong())).thenReturn(watch); } - SearchHits searchHits = new SearchHits(hits, new TotalHits(count, TotalHits.Relation.EQUAL_TO), 1.0f); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(count, TotalHits.Relation.EQUAL_TO), 1.0f); doAnswer(invocation -> { ActionListener listener = (ActionListener) invocation.getArguments()[2]; ActionListener.respondAndRelease( diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 60fa2581b4218..b75ac51c3510f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor2; @@ -24,6 +23,7 @@ import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; 
import org.elasticsearch.cluster.ClusterState; @@ -202,7 +202,7 @@ public void testFindTriggeredWatchesGoodCase() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[2]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onResponse(mockRefreshResponse(1, 1)); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -210,14 +210,14 @@ public void testFindTriggeredWatchesGoodCase() { SearchResponse searchResponse1 = mock(SearchResponse.class); when(searchResponse1.getSuccessfulShards()).thenReturn(1); when(searchResponse1.getTotalShards()).thenReturn(1); - final BytesArray source = new BytesArray("{}"); + BytesArray source = new BytesArray("{}"); { - final SearchHit hit = new SearchHit(0, "first_foo"); + SearchHit hit = SearchHit.unpooled(0, "first_foo"); hit.version(1L); hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null)); hit.sourceRef(source); when(searchResponse1.getHits()).thenReturn( - new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f) + SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f) ); } when(searchResponse1.getScrollId()).thenReturn("_scrollId"); @@ -228,20 +228,20 @@ public void testFindTriggeredWatchesGoodCase() { return null; }).when(client).execute(eq(TransportSearchAction.TYPE), any(), any()); - // First return a scroll response with a single hit and then with no hits doAnswer(invocation -> { SearchScrollRequest request = (SearchScrollRequest) invocation.getArguments()[1]; @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[2]; if (request.scrollId().equals("_scrollId")) { - final var hit2 = new SearchHit(0, "second_foo"); - hit2.version(1L); - hit2.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null)); - hit2.sourceRef(source); + // First 
return a scroll response with a single hit and then with no hits + var hit = SearchHit.unpooled(0, "second_foo"); + hit.version(1L); + hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null)); + hit.sourceRef(source); ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits(new SearchHit[] { hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f), null, null, false, @@ -409,7 +409,7 @@ public void testIndexNotFoundButInMetadata() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[2]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(new IndexNotFoundException(TriggeredWatchStoreField.INDEX_NAME)); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -507,8 +507,8 @@ public void testDeleteTriggeredWatches() throws Exception { assertThat(response.getItems().length, is(1)); } - private RefreshResponse mockRefreshResponse(int total, int successful) { - RefreshResponse refreshResponse = mock(RefreshResponse.class); + private BroadcastResponse mockRefreshResponse(int total, int successful) { + BroadcastResponse refreshResponse = mock(BroadcastResponse.class); when(refreshResponse.getTotalShards()).thenReturn(total); when(refreshResponse.getSuccessfulShards()).thenReturn(successful); return refreshResponse; diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index cdb14c348bbf8..a17cb7474a681 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ 
b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -60,7 +60,6 @@ import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; @@ -1218,7 +1217,7 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) } @Override - protected Function loadBlockExpected(MapperService mapper, String loaderFieldName) { + protected Function loadBlockExpected() { return v -> ((BytesRef) v).utf8ToString(); } diff --git a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java index 4fcf096787781..15336286cc2fc 100644 --- a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java +++ b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -42,6 +43,7 @@ public class FreezeIndexPlugin extends Plugin implements ActionPlugin { @Override public List getRestHandlers( Settings settings, + NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, diff --git 
a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 6854035281670..254d12a05d936 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -11,7 +11,6 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -24,7 +23,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.StreamsUtils; @@ -295,8 +293,7 @@ public void testWatcherWithApiKey() throws Exception { public void testServiceAccountApiKey() throws IOException { @UpdateForV9 - var originalClusterSupportsServiceAccounts = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_13_0)) - .orElse(true); + var originalClusterSupportsServiceAccounts = oldClusterHasFeature(RestTestLegacyFeatures.SERVICE_ACCOUNTS_SUPPORTED); assumeTrue("no service accounts in versions before 7.13", originalClusterSupportsServiceAccounts); if (isRunningAgainstOldCluster()) { @@ -507,8 +504,7 @@ public void testRollupAfterRestart() throws Exception { public void testTransformLegacyTemplateCleanup() throws Exception { @UpdateForV9 - var originalClusterSupportsTransform = 
parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_2_0)) - .orElse(true); + var originalClusterSupportsTransform = oldClusterHasFeature(RestTestLegacyFeatures.TRANSFORM_SUPPORTED); assumeTrue("Before 7.2 transforms didn't exist", originalClusterSupportsTransform); if (isRunningAgainstOldCluster()) { @@ -590,7 +586,7 @@ public void testTransformLegacyTemplateCleanup() throws Exception { public void testSlmPolicyAndStats() throws IOException { @UpdateForV9 - var originalClusterSupportsSlm = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_4_0)).orElse(true); + var originalClusterSupportsSlm = oldClusterHasFeature(RestTestLegacyFeatures.SLM_SUPPORTED); SnapshotLifecyclePolicy slmPolicy = new SnapshotLifecyclePolicy( "test-policy", @@ -943,12 +939,10 @@ private void waitForRollUpJob(final String rollupJob, final Matcher expectedS public void testDataStreams() throws Exception { @UpdateForV9 - var originalClusterSupportsDataStreams = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_9_0)) - .orElse(true); + var originalClusterSupportsDataStreams = oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED); @UpdateForV9 - var originalClusterDataStreamHasDateInIndexName = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_11_0)) - .orElse(true); + var originalClusterDataStreamHasDateInIndexName = oldClusterHasFeature(RestTestLegacyFeatures.NEW_DATA_STREAMS_INDEX_NAME_FORMAT); assumeTrue("no data streams in versions before 7.9.0", originalClusterSupportsDataStreams); if (isRunningAgainstOldCluster()) { @@ -996,9 +990,13 @@ public void testDataStreams() throws Exception { /** * Tests that a single document survives. Super basic smoke test. 
*/ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104101") + @UpdateForV9 // Can be removed public void testDisableFieldNameField() throws IOException { - assumeTrue("can only disable field names field before 8.0", Version.fromString(getOldClusterVersion()).before(Version.V_8_0_0)); + assumeFalse( + "can only disable field names field before 8.0", + oldClusterHasFeature(RestTestLegacyFeatures.DISABLE_FIELD_NAMES_FIELD_REMOVED) + ); + String docLocation = "/nofnf/_doc/1"; String doc = """ { @@ -1023,10 +1021,11 @@ public void testDisableFieldNameField() throws IOException { } } }"""); - createIndex.setOptions( - RequestOptions.DEFAULT.toBuilder() - .setWarningsHandler(warnings -> false == warnings.equals(List.of(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE))) - ); + createIndex.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> switch (warnings.size()) { + case 0 -> false; // old versions don't return a warning + case 1 -> false == warnings.get(0).contains("_field_names"); + default -> true; + })); client().performRequest(createIndex); Request createDoc = new Request("PUT", docLocation); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index c6ef15bace343..b3a0d91d583a9 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -11,7 +11,6 @@ import org.apache.http.util.EntityUtils; import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import 
org.elasticsearch.client.ResponseException; @@ -20,6 +19,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.junit.Before; @@ -93,10 +93,9 @@ protected Settings restClientSettings() { } public void testDeploymentSurvivesRestart() throws Exception { - @UpdateForV9 // upgrade will always be from v8, condition can be removed - var originalClusterAtLeastV8 = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_0_0)).orElse(true); - // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW - assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); + @UpdateForV9 // condition will always be true from v8, can be removed + var originalClusterSupportsNlpModels = oldClusterHasFeature(RestTestLegacyFeatures.ML_NLP_SUPPORTED); + assumeTrue("NLP model deployments added in 8.0", originalClusterSupportsNlpModels); String modelId = "trained-model-full-cluster-restart"; diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 3c55aa8aa4663..c8659aa0753f4 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -26,7 +26,6 @@ configurations { dependencies { oldesFixture project(':test:fixtures:old-elasticsearch') - testImplementation project(':client:rest-high-level') } jdks { diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index 270628be2ea8e..9c00f5d7cec33 100644 --- 
a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -80,7 +80,6 @@ public void testOldSourceOnlyRepoAccess() throws IOException { runTest(true); } - @SuppressWarnings("removal") public void runTest(boolean sourceOnlyRepository) throws IOException { boolean afterRestart = Booleans.parseBoolean(System.getProperty("tests.after_restart")); String repoLocation = System.getProperty("tests.repo.location"); @@ -129,7 +128,6 @@ private void afterRestart(String indexName) throws IOException { ensureGreen("mounted_shared_cache_" + indexName); } - @SuppressWarnings("removal") private void beforeRestart( boolean sourceOnlyRepository, String repoLocation, @@ -267,7 +265,6 @@ private static String sourceForDoc(int i) { return "{\"test\":\"test" + i + "\",\"val\":" + i + ",\"create_date\":\"2020-01-" + Strings.format("%02d", i + 1) + "\"}"; } - @SuppressWarnings("removal") private void restoreMountAndVerify( int numDocs, Set expectedIds, @@ -359,7 +356,6 @@ private void restoreMountAndVerify( assertDocs("mounted_shared_cache_" + indexName, numDocs, expectedIds, sourceOnlyRepository, oldVersion, numberOfShards); } - @SuppressWarnings("removal") private void assertDocs( String index, int numDocs, diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 02a2074b2b5bf..c3a72f3652952 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -10,7 +10,6 @@ apply plugin: 'elasticsearch.rest-resources' dependencies { testImplementation testArtifact(project(xpackModule('core'))) testImplementation project(':x-pack:qa') - testImplementation project(':client:rest-high-level') } restResources { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java 
b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 1a37f31bffe79..2bce06543f67c 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -39,19 +40,53 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.Consumer; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; public class ApiKeyBackwardsCompatibilityIT extends AbstractUpgradeTestCase { + private static final Version UPGRADE_FROM_VERSION = Version.fromString(System.getProperty("tests.upgrade_from_version")); + private RestClient oldVersionClient = null; private RestClient newVersionClient = null; + public void testQueryRestTypeKeys() throws IOException { + assumeTrue( + "only API keys created pre-8.9 are relevant for the rest-type query bwc case", + UPGRADE_FROM_VERSION.before(Version.V_8_9_0) + ); + switch (CLUSTER_TYPE) { + case OLD -> createOrGrantApiKey(client(), "query-test-rest-key-from-old-cluster", "{}"); + case MIXED -> createOrGrantApiKey(client(), "query-test-rest-key-from-mixed-cluster", "{}"); + case UPGRADED -> { + createOrGrantApiKey(client(), "query-test-rest-key-from-upgraded-cluster", 
"{}"); + for (String query : List.of(""" + {"query": {"term": {"type": "rest" }}}""", """ + {"query": {"prefix": {"type": "re" }}}""", """ + {"query": {"wildcard": {"type": "r*t" }}}""", """ + {"query": {"range": {"type": {"gte": "raaa", "lte": "rzzz"}}}}""")) { + assertQuery(client(), query, apiKeys -> { + assertThat( + apiKeys.stream().map(k -> (String) k.get("name")).toList(), + hasItems( + "query-test-rest-key-from-old-cluster", + "query-test-rest-key-from-mixed-cluster", + "query-test-rest-key-from-upgraded-cluster" + ) + ); + }); + } + } + } + } + public void testCreatingAndUpdatingApiKeys() throws Exception { assumeTrue( "The remote_indices for API Keys are not supported before transport version " @@ -177,7 +212,10 @@ private Tuple createOrGrantApiKey(String roles) throws IOExcepti } private Tuple createOrGrantApiKey(RestClient client, String roles) throws IOException { - final String name = "test-api-key-" + randomAlphaOfLengthBetween(3, 5); + return createOrGrantApiKey(client, "test-api-key-" + randomAlphaOfLengthBetween(3, 5), roles); + } + + private Tuple createOrGrantApiKey(RestClient client, String name, String roles) throws IOException { final Request createApiKeyRequest; String body = Strings.format(""" { @@ -391,4 +429,15 @@ private static RoleDescriptor randomRoleDescriptor(boolean includeRemoteIndices) null ); } + + private void assertQuery(RestClient restClient, String body, Consumer>> apiKeysVerifier) throws IOException { + final Request request = new Request("GET", "/_security/_query/api_key"); + request.setJsonEntity(body); + final Response response = restClient.performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + @SuppressWarnings("unchecked") + final List> apiKeys = (List>) responseMap.get("api_keys"); + apiKeysVerifier.accept(apiKeys); + } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java 
b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java index fd6b7200ff004..d935672e0a243 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.upgrades; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -32,7 +31,6 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.oneOf; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104193") public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase { // See PyTorchModelIT for how this model was created diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java index 6c1b2be05fd5f..f9d28670dab65 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java @@ -38,7 +38,6 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -@SuppressWarnings("removal") public class MlJobSnapshotUpgradeIT extends AbstractUpgradeTestCase { private static final String JOB_ID = "ml-snapshots-upgrade-job"; diff --git a/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java b/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java index 110a1fd24d0d3..3b602a53a76a4 100644 --- 
a/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java +++ b/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java @@ -67,7 +67,7 @@ public Iterable parameters() throws Exception { ClientYamlTestSection modified = new ClientYamlTestSection( candidate.getTestSection().getLocation(), candidate.getTestSection().getName(), - candidate.getTestSection().getSkipSection(), + candidate.getTestSection().getPrerequisiteSection(), candidate.getTestSection().getExecutableSections() ); result.add(new Object[] { new ClientYamlTestCandidate(suite.modified, modified) }); @@ -169,7 +169,7 @@ public Suite(ClientYamlTestCandidate candidate) { candidate.getApi(), candidate.getName(), candidate.getRestTestSuite().getFile(), - new SetupSection(candidate.getSetupSection().getSkipSection(), setup), + new SetupSection(candidate.getSetupSection().getPrerequisiteSection(), setup), candidate.getTeardownSection(), List.of() ); @@ -352,6 +352,7 @@ private boolean handleBulk(ApiCallSection bulk) { defaultPipeline, null, null, + null, true, XContentType.JSON, (index, type) -> indexRequests.add(index), diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle index 6a7d60f88a1d7..1c41e58ffb0da 100644 --- a/x-pack/qa/saml-idp-tests/build.gradle +++ b/x-pack/qa/saml-idp-tests/build.gradle @@ -5,9 +5,9 @@ dependencies { javaRestTestImplementation "com.google.jimfs:jimfs:${versions.jimfs}" javaRestTestImplementation "com.google.guava:guava:${versions.jimfs_guava}" javaRestTestImplementation project(":x-pack:test:idp-fixture") + javaRestTestRuntimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" } - tasks.named("javaRestTest").configure { usesDefaultDistribution() } diff --git a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java 
b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java index 5718930f37c82..c8b3b3fc3aed2 100644 --- a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java +++ b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java @@ -28,7 +28,6 @@ import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpCoreContext; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -92,7 +91,6 @@ /** * An integration test for validating SAML authentication against a real Identity Provider (Shibboleth) */ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103717") @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class SamlAuthenticationIT extends ESRestTestCase { diff --git a/x-pack/qa/security-example-spi-extension/build.gradle b/x-pack/qa/security-example-spi-extension/build.gradle index 92e403d43b4f7..13e1325848f32 100644 --- a/x-pack/qa/security-example-spi-extension/build.gradle +++ b/x-pack/qa/security-example-spi-extension/build.gradle @@ -12,11 +12,9 @@ dependencies { compileOnly project(':x-pack:plugin:core') testImplementation project(':x-pack:plugin:core') javaRestTestImplementation project(':x-pack:plugin:core') - javaRestTestImplementation project(':client:rest-high-level') // let the javaRestTest see the classpath of main javaRestTestImplementation project.sourceSets.main.runtimeClasspath javaRestTestImplementation project(':modules:rest-root') - } testClusters.configureEach { diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index 1e8e88b91c330..da2d095c001d4 100644 --- 
a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -6,7 +6,6 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' dependencies { yamlRestTestImplementation project(':x-pack:plugin:core') - yamlRestTestImplementation project(':client:rest-high-level') } String outputDir = "${buildDir}/generated-resources/${project.name}" diff --git a/x-pack/test/idp-fixture/build.gradle b/x-pack/test/idp-fixture/build.gradle index 691483bcfe5c3..3fd39dd9a18a8 100644 --- a/x-pack/test/idp-fixture/build.gradle +++ b/x-pack/test/idp-fixture/build.gradle @@ -1,3 +1,7 @@ +import org.elasticsearch.gradle.Architecture +import org.elasticsearch.gradle.internal.docker.DockerBuildTask +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.java' apply plugin: 'elasticsearch.cache-test-fixtures' @@ -7,3 +11,26 @@ dependencies { api project(':test:fixtures:testcontainer-utils') api "junit:junit:${versions.junit}" } + +tasks.register("deployIdpFixtureDockerImages", DockerBuildTask) { + dockerContext.fileValue(file("src/main/resources/idp")) + baseImages = ["openjdk:11.0.16-jre"] + noCache = BuildParams.isCi() + tags = ["docker.elastic.co/elasticsearch-dev/idp-fixture:1.0"] + push = BuildParams.isCi() + getPlatforms().addAll( Architecture.values().collect{ it.dockerPlatform } ) +} + + +tasks.register("deployOpenLdapFixtureDockerImages", DockerBuildTask) { + dockerContext.fileValue(file("src/main/resources/openldap")) + baseImages = ["osixia/openldap:1.4.0"] + noCache = BuildParams.isCi() + tags = ["docker.elastic.co/elasticsearch-dev/openldap-fixture:1.0"] + push = BuildParams.isCi() + getPlatforms().addAll( Architecture.values().collect{ it.dockerPlatform } ) +} + +tasks.register("deployFixtureDockerImages") { + dependsOn tasks.withType(DockerBuildTask) +} diff --git a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/IdpTestContainer.java 
b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/IdpTestContainer.java index 692cd4b081411..d76ca5741d8b3 100644 --- a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/IdpTestContainer.java +++ b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/IdpTestContainer.java @@ -11,8 +11,7 @@ import org.junit.rules.TemporaryFolder; import org.testcontainers.containers.Network; import org.testcontainers.containers.wait.strategy.Wait; -import org.testcontainers.images.builder.ImageFromDockerfile; -import org.testcontainers.images.builder.dockerfile.statement.SingleArgumentStatement; +import org.testcontainers.images.RemoteDockerImage; import java.io.IOException; import java.nio.file.Path; @@ -21,8 +20,7 @@ public final class IdpTestContainer extends DockerEnvironmentAwareTestContainer { - public static final String DOCKER_BASE_IMAGE = "openjdk:11.0.16-jre"; - + private static final String DOCKER_BASE_IMAGE = "docker.elastic.co/elasticsearch-dev/idp-fixture:1.0"; private final TemporaryFolder temporaryFolder = new TemporaryFolder(); private Path certsPath; @@ -34,117 +32,10 @@ protected IdpTestContainer() { } public IdpTestContainer(Network network) { - super( - new ImageFromDockerfile("es-idp-testfixture").withDockerfileFromBuilder( - builder -> builder.from(DOCKER_BASE_IMAGE) - .env("jetty_version", "9.3.27.v20190418") - .env("jetty_hash", "7c7c80dd1c9f921771e2b1a05deeeec652d5fcaa") - .env("idp_version", "3.4.3") - .env("idp_hash", "eb86bc7b6366ce2a44f97cae1b014d307b84257e3149469b22b2d091007309db") - .env("dta_hash", "2f547074b06952b94c35631398f36746820a7697") - .env("slf4j_version", "1.7.25") - .env("slf4j_hash", "da76ca59f6a57ee3102f8f9bd9cee742973efa8a") - .env("logback_version", "1.2.3") - .env("logback_classic_hash", "7c4f3c474fb2c041d8028740440937705ebb473a") - .env("logback_core_hash", "864344400c3d4d92dfeb0a305dc87d953677c03c") - .env("logback_access_hash", 
"e8a841cb796f6423c7afd8738df6e0e4052bf24a") - - .env("JETTY_HOME", "/opt/jetty-home") - .env("JETTY_BASE", "/opt/shib-jetty-base") - .env("PATH", "$PATH:$JAVA_HOME/bin") - .env("JETTY_BROWSER_SSL_KEYSTORE_PASSWORD", "secret") - .env("JETTY_BACKCHANNEL_SSL_KEYSTORE_PASSWORD", "secret") - .env("JETTY_MAX_HEAP", "64m") - // Manually override the jetty keystore otherwise it will attempt to download and fail - .run("mkdir -p /opt/shib-jetty-base/modules") - .copy("idp/jetty-custom/ssl.mod", "/opt/shib-jetty-base/modules/ssl.mod") - .copy("idp/jetty-custom/keystore", "/opt/shib-jetty-base/etc/keystore") - // Download Jetty, verify the hash, and install, initialize a new base - .run( - "wget -q https://repo.maven.apache.org/maven2/org/eclipse/jetty/jetty-distribution/$jetty_version/jetty-distribution-$jetty_version.tar.gz" - + " && echo \"$jetty_hash jetty-distribution-$jetty_version.tar.gz\" | sha1sum -c -" - + " && tar -zxvf jetty-distribution-$jetty_version.tar.gz -C /opt" - + " && ln -s /opt/jetty-distribution-$jetty_version/ /opt/jetty-home" - ) - // Config Jetty - .run( - "mkdir -p /opt/shib-jetty-base/modules /opt/shib-jetty-base/lib/ext /opt/shib-jetty-base/lib/logging /opt/shib-jetty-base/resources" - + " && cd /opt/shib-jetty-base" - + " && touch start.ini" - + " && java -jar ../jetty-home/start.jar --add-to-startd=http,https,deploy,ext,annotations,jstl,rewrite" - ) - // Download Shibboleth IdP, verify the hash, and install - .run( - "wget -q https://shibboleth.net/downloads/identity-provider/archive/$idp_version/shibboleth-identity-provider-$idp_version.tar.gz" - + " && echo \"$idp_hash shibboleth-identity-provider-$idp_version.tar.gz\" | sha256sum -c -" - + " && tar -zxvf shibboleth-identity-provider-$idp_version.tar.gz -C /opt" - + " && ln -s /opt/shibboleth-identity-provider-$idp_version/ /opt/shibboleth-idp" - ) - // Download the library to allow SOAP Endpoints, verify the hash, and place - .run( - "wget -q 
https://build.shibboleth.net/nexus/content/repositories/releases/net/shibboleth/utilities/jetty9/jetty9-dta-ssl/1.0.0/jetty9-dta-ssl-1.0.0.jar" - + " && echo \"$dta_hash jetty9-dta-ssl-1.0.0.jar\" | sha1sum -c -" - + " && mv jetty9-dta-ssl-1.0.0.jar /opt/shib-jetty-base/lib/ext/" - ) - // Download the slf4j library for Jetty logging, verify the hash, and place - .run( - "wget -q https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/$slf4j_version/slf4j-api-$slf4j_version.jar" - + " && echo \"$slf4j_hash slf4j-api-$slf4j_version.jar\" | sha1sum -c -" - + " && mv slf4j-api-$slf4j_version.jar /opt/shib-jetty-base/lib/logging/" - ) - // Download the logback_classic library for Jetty logging, verify the hash, and place - .run( - "wget -q https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/$logback_version/logback-classic-$logback_version.jar" - + " && echo \"$logback_classic_hash logback-classic-$logback_version.jar\" | sha1sum -c -" - + " && mv logback-classic-$logback_version.jar /opt/shib-jetty-base/lib/logging/" - ) - // Download the logback-core library for Jetty logging, verify the hash, and place - .run( - "wget -q https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/$logback_version/logback-core-$logback_version.jar" - + " && echo \"$logback_core_hash logback-core-$logback_version.jar\" | sha1sum -c -" - + " && mv logback-core-$logback_version.jar /opt/shib-jetty-base/lib/logging/" - ) - // Download the logback-access library for Jetty logging, verify the hash, and place - .run( - "wget -q https://repo.maven.apache.org/maven2/ch/qos/logback/logback-access/$logback_version/logback-access-$logback_version.jar" - + " && echo \"$logback_access_hash logback-access-$logback_version.jar\" | sha1sum -c -" - + " && mv logback-access-$logback_version.jar /opt/shib-jetty-base/lib/logging/" - ) - // ## Copy local files - .copy("idp/shib-jetty-base/", "/opt/shib-jetty-base/") - .copy("idp/shibboleth-idp/", "/opt/shibboleth-idp/") - 
.copy("idp/bin/", "/usr/local/bin/") - // Setting owner ownership and permissions - .run( - "useradd jetty -U -s /bin/false" - + " && chown -R root:jetty /opt" - + " && chmod -R 640 /opt" - + " && chown -R root:jetty /opt/shib-jetty-base" - + " && chmod -R 640 /opt/shib-jetty-base" - + " && chmod -R 750 /opt/shibboleth-idp/bin" - ) - .run("chmod 750 /usr/local/bin/run-jetty.sh /usr/local/bin/init-idp.sh") - .run("chmod +x /opt/jetty-home/bin/jetty.sh") - // Opening 4443 (browser TLS), 8443 (mutual auth TLS) - .cmd("run-jetty.sh") - .withStatement( - new SingleArgumentStatement( - "HEALTHCHECK", - "CMD curl -f -s --http0.9 http://localhost:4443 " + "--connect-timeout 10 --max-time 10 --output - > /dev/null" - ) - ) - // .expose(4443) - .build() - ) - .withFileFromClasspath("idp/jetty-custom/ssl.mod", "/idp/jetty-custom/ssl.mod") - .withFileFromClasspath("idp/jetty-custom/keystore", "/idp/jetty-custom/keystore") - .withFileFromClasspath("idp/shib-jetty-base/", "/idp/shib-jetty-base/") - .withFileFromClasspath("idp/shibboleth-idp/", "/idp/shibboleth-idp/") - .withFileFromClasspath("idp/bin/", "/idp/bin/") - ); + super(new RemoteDockerImage(DOCKER_BASE_IMAGE)); withNetworkAliases("idp"); withNetwork(network); - waitingFor(Wait.forHealthcheck()); + waitingFor(Wait.forListeningPorts(4443)); addExposedPorts(4443, 8443); } diff --git a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java index 2f65134f2ec72..69d42e8b985a3 100644 --- a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java +++ b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java @@ -10,7 +10,7 @@ import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; import org.junit.rules.TemporaryFolder; import 
org.testcontainers.containers.Network; -import org.testcontainers.images.builder.ImageFromDockerfile; +import org.testcontainers.images.RemoteDockerImage; import java.io.IOException; import java.nio.file.Path; @@ -19,7 +19,7 @@ public final class OpenLdapTestContainer extends DockerEnvironmentAwareTestContainer { - public static final String DOCKER_BASE_IMAGE = "osixia/openldap:1.4.0"; + private static final String DOCKER_BASE_IMAGE = "docker.elastic.co/elasticsearch-dev/openldap-fixture:1.0"; private final TemporaryFolder temporaryFolder = new TemporaryFolder(); private Path certsPath; @@ -29,36 +29,7 @@ public OpenLdapTestContainer() { } public OpenLdapTestContainer(Network network) { - super( - new ImageFromDockerfile("es-openldap-testfixture").withDockerfileFromBuilder( - builder -> builder.from(DOCKER_BASE_IMAGE) - .env("LDAP_ADMIN_PASSWORD", "NickFuryHeartsES") - .env("LDAP_DOMAIN", "oldap.test.elasticsearch.com") - .env("LDAP_BASE_DN", "DC=oldap,DC=test,DC=elasticsearch,DC=com") - .env("LDAP_TLS", "true") - .env("LDAP_TLS_CRT_FILENAME", "ldap_server.pem") - .env("LDAP_TLS_CA_CRT_FILENAME", "ca_server.pem") - .env("LDAP_TLS_KEY_FILENAME", "ldap_server.key") - .env("LDAP_TLS_VERIFY_CLIENT", "never") - .env("LDAP_TLS_CIPHER_SUITE", "NORMAL") - .env("LDAP_LOG_LEVEL", "256") - .copy( - "openldap/ldif/users.ldif", - "/container/service/slapd/assets/config/bootstrap/ldif/custom/20-bootstrap-users.ldif" - ) - .copy( - "openldap/ldif/config.ldif", - "/container/service/slapd/assets/config/bootstrap/ldif/custom/10-bootstrap-config.ldif" - ) - .copy("openldap/certs", "/container/service/slapd/assets/certs") - - .build() - ) - .withFileFromClasspath("openldap/certs", "/openldap/certs/") - .withFileFromClasspath("openldap/ldif/users.ldif", "/openldap/ldif/users.ldif") - .withFileFromClasspath("openldap/ldif/config.ldif", "/openldap/ldif/config.ldif") - ); - // withLogConsumer(new Slf4jLogConsumer(logger())); + super(new RemoteDockerImage(DOCKER_BASE_IMAGE)); 
withNetworkAliases("openldap"); withNetwork(network); withExposedPorts(389, 636); diff --git a/x-pack/test/idp-fixture/src/main/resources/idp/Dockerfile b/x-pack/test/idp-fixture/src/main/resources/idp/Dockerfile index ea7b6880fb42b..7acb86c05e0e1 100644 --- a/x-pack/test/idp-fixture/src/main/resources/idp/Dockerfile +++ b/x-pack/test/idp-fixture/src/main/resources/idp/Dockerfile @@ -20,12 +20,13 @@ ENV JETTY_HOME=/opt/jetty-home \ JETTY_BASE=/opt/shib-jetty-base \ PATH=$PATH:$JAVA_HOME/bin \ JETTY_BROWSER_SSL_KEYSTORE_PASSWORD=secret \ - JETTY_BACKCHANNEL_SSL_KEYSTORE_PASSWORD=secret - + JETTY_BACKCHANNEL_SSL_KEYSTORE_PASSWORD=secret \ + JETTY_MAX_HEAP=64m + # Manually override the jetty keystore otherwise it will attempt to download and fail RUN mkdir -p /opt/shib-jetty-base/modules -COPY ./idp/jetty-custom/ssl.mod /opt/shib-jetty-base/modules/ssl.mod -COPY ./idp/jetty-custom/keystore /opt/shib-jetty-base/etc/keystore +COPY ./jetty-custom/ssl.mod /opt/shib-jetty-base/modules/ssl.mod +COPY ./jetty-custom/keystore /opt/shib-jetty-base/etc/keystore # Download Jetty, verify the hash, and install, initialize a new base RUN wget -q https://repo.maven.apache.org/maven2/org/eclipse/jetty/jetty-distribution/$jetty_version/jetty-distribution-$jetty_version.tar.gz \ @@ -71,9 +72,9 @@ RUN wget -q https://repo.maven.apache.org/maven2/ch/qos/logback/logback-access/$ && mv logback-access-$logback_version.jar /opt/shib-jetty-base/lib/logging/ ## Copy local files -COPY idp/shib-jetty-base/ /opt/shib-jetty-base/ -COPY idp/shibboleth-idp/ /opt/shibboleth-idp/ -COPY idp/bin/ /usr/local/bin/ +COPY shib-jetty-base/ /opt/shib-jetty-base/ +COPY shibboleth-idp/ /opt/shibboleth-idp/ +COPY bin/ /usr/local/bin/ # Setting owner ownership and permissions RUN useradd jetty -U -s /bin/false \ @@ -86,6 +87,8 @@ RUN useradd jetty -U -s /bin/false \ RUN chmod 750 /usr/local/bin/run-jetty.sh /usr/local/bin/init-idp.sh RUN chmod +x /opt/jetty-home/bin/jetty.sh +RUN apt-get update && apt-get install 
-y netcat + # Opening 4443 (browser TLS), 8443 (mutual auth TLS) EXPOSE 4443 8443 diff --git a/x-pack/test/idp-fixture/src/main/resources/openldap/Dockerfile b/x-pack/test/idp-fixture/src/main/resources/openldap/Dockerfile new file mode 100644 index 0000000000000..58c9952e2f4b9 --- /dev/null +++ b/x-pack/test/idp-fixture/src/main/resources/openldap/Dockerfile @@ -0,0 +1,17 @@ +FROM osixia/openldap:1.4.0 + + +ENV LDAP_ADMIN_PASSWORD=NickFuryHeartsES +ENV LDAP_DOMAIN=oldap.test.elasticsearch.com +ENV LDAP_BASE_DN=DC=oldap,DC=test,DC=elasticsearch,DC=com +ENV LDAP_TLS=true +ENV LDAP_TLS_CRT_FILENAME=ldap_server.pem +ENV LDAP_TLS_CA_CRT_FILENAME=ca_server.pem +ENV LDAP_TLS_KEY_FILENAME=ldap_server.key +ENV LDAP_TLS_VERIFY_CLIENT=never +ENV LDAP_TLS_CIPHER_SUITE=NORMAL +ENV LDAP_LOG_LEVEL=256 + +COPY ./ldif/users.ldif /container/service/slapd/assets/config/bootstrap/ldif/custom/20-bootstrap-users.ldif +COPY ./ldif/config.ldif /container/service/slapd/assets/config/bootstrap/ldif/custom/10-bootstrap-config.ldif +COPY ./certs /container/service/slapd/assets/certs